diff --git a/Externals.cfg b/Externals.cfg index 45b075d619..7f92ef5b9a 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -1,36 +1,45 @@ -[ufs_utils_develop] -branch = dtc/develop +[ufs_utils] protocol = git repo_url = https://github.com/NCAR/UFS_UTILS +# Specify either a branch name or a hash but not both. +#branch = dtc/develop +hash = 22d9e6ad local_path = sorc/UFS_UTILS_develop required = True -[ufs_utils_chgres_grib2] +[ufs_utils_chgres] protocol = git repo_url = https://github.com/NCAR/UFS_UTILS -#Working hash of feature/chgres_grib2 branch -hash = c29174e +# Specify either a branch name or a hash but not both. +#branch = feature/chgres_grib2 +hash = b47bc84c local_path = sorc/UFS_UTILS_chgres_grib2 required = True -[NEMSfv3gfs] -branch = gmtb/develop +[ufs_weather_model] protocol = git -repo_url = https://github.com/NCAR/NEMSfv3gfs -local_path = sorc/NEMSfv3gfs +repo_url = https://github.com/NCAR/ufs-weather-model +# Specify either a branch name or a hash but not both. +#branch = dtc/develop +hash = 4c2d541b +local_path = sorc/ufs_weather_model required = True [EMC_post] -branch = develop protocol = git repo_url = https://github.com/NOAA-EMC/EMC_post +# Specify either a branch name or a hash but not both. +#branch = develop +hash = 78078f62 local_path = sorc/EMC_post required = True #[gsi] -#branch = regional #protocol = git #repo_url = gerrit:ProdGSI +## Specify either a branch name or a hash but not both. +#branch = regional +##hash = #local_path = sorc/regional_gsi.fd #required = True diff --git a/README.md b/README.md index 0a01fc3ac7..bd9e9ef805 100644 --- a/README.md +++ b/README.md @@ -10,16 +10,12 @@ This is the community\_develop branch of the regional\_workflow used to run the This step will checkout EMC\_post, NEMSfv3gfs and its submodules, UFS\_UTILS\_chgres\_grib2 and UFS\_UTILS\_develop in the sorc directory. -2. Build the utilities: +2. 
Build the utilities, post and FV3: ``` -cd regional -./build_regional theia >& out.build_regional -``` -3. Build FV3: -``` -cd sorc/NEMSfv3gfs/tests -compile.sh $BASEDIR/regional_workflow/sorc/NEMSfv3gfs/FV3 theia.intel "CCPP=Y STATIC=N 32BIT=Y REPRO=Y" >& out.compile_32bit +cd sorc +./build_all.sh ``` +This step will also copy the executables to the `exec` directory and link the fix files. 4. Create a `config.sh` file in the `ush` directory (see Users Guide). 5. Generate a workflow: ``` diff --git a/jobs/JREGIONAL_GET_EXTRN_FILES b/jobs/JREGIONAL_GET_EXTRN_FILES index 3b934a92b3..5b447d7af2 100755 --- a/jobs/JREGIONAL_GET_EXTRN_FILES +++ b/jobs/JREGIONAL_GET_EXTRN_FILES @@ -4,17 +4,19 @@ #----------------------------------------------------------------------- # # This script gets either from the system directory or from mass store -# (HPSS) the files generated by the external model (specified by the va- -# riable EXTRN_MDL_NAME) for either the initial conditions (ICs) or the +# (HPSS) the files generated by the external model (specified by the +# variable EXTRN_MDL_NAME) for either the initial conditions (ICs) or the # lateral boundary conditions (LBCs). Which of these we are considering -# depends on the value of the variable ICS_OR_LBCS. Also, when we -# refer to ICs, we are also referring to the surface fields and the 0-th -# hour LBC, and when we refer to LBCs, we are referring to the LBCs ex- -# cluding the one at 0-th hour. If considering ICs, this script places -# these external model files in a subdirectory under the one specified -# by the variable EXTRN_MDL_FILES_BASEDIR_ICS, and if considering -# LBCs, it places the files in a subdirectory under the one specified by -# the variable EXTRN_MDL_FILES_BASEDIR_LBCS. +# depends on the value of the variable ICS_OR_LBCS, which should be +# defined in the environment (when calling this script from a rocoto +# workflow, the workflow should define this variable, e.g. using rocoto's +# tag). 
+# +# Note that when we refer to ICs, we are referring to not only the +# atmospheric fields at the initial time but also various surface fields +# (which are for now time-independent) as well as the 0-th forecast hour +# LBCs. Also, when we refer to LBCs, we are referring to the LBCs excluding +# the one at the 0-th hour. # #----------------------------------------------------------------------- # @@ -22,13 +24,12 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -50,15 +51,27 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +# Print message indicating entry into script. 
+# +#----------------------------------------------------------------------- +# +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + This is the J-job script for the task that copies/fetches to a local di- rectory (either from disk or HPSS) the external model files from which initial or boundary condition files for the FV3 will be generated. @@ -79,14 +92,13 @@ case $EXTRN_MDL_NAME in # last CDATE for the GSMGFS to the one 6 hours before this. CDATE_max="2019061206" if [ "$CDATE" -gt "$CDATE_max" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Output from the specified external model (EXTRN_MDL_NAME) is not availa- ble for the specified cycle date and time (CDATE) because the latter is later than the last forecast date and time (CDATE_max) with this model: EXTRN_MDL_NAME = \"${EXTRN_MDL_NAME}\" CDATE_max = \"${CDATE_max}\" - CDATE = \"${CDATE}\" -" + CDATE = \"${CDATE}\"" fi ;; @@ -98,14 +110,13 @@ later than the last forecast date and time (CDATE_max) with this model: # CDATE_min="2019061212" CDATE_min="2018121500" if [ "$CDATE" -lt "$CDATE_min" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Output from the specified external model (EXTRN_MDL_NAME) is not availa- ble for the specified cycle date and time (CDATE) because the latter is earlier than the implementation date of this model: EXTRN_MDL_NAME = \"${EXTRN_MDL_NAME}\" CDATE_min = \"${CDATE_min}\" - CDATE = \"${CDATE}\" -" + CDATE = \"${CDATE}\"" fi ;; @@ -114,14 +125,13 @@ earlier than the implementation date of this model: # July 01, 2015. 
CDATE_min="2015070100" if [ "$CDATE" -lt "$CDATE_min" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Output from the specified external model (EXTRN_MDL_NAME) is not availa- ble for the specified cycle date and time (CDATE) because the latter is earlier than the implementation date of this model: EXTRN_MDL_NAME = \"${EXTRN_MDL_NAME}\" CDATE_min = \"${CDATE_min}\" - CDATE = \"${CDATE}\" -" + CDATE = \"${CDATE}\"" fi ;; @@ -131,14 +141,13 @@ earlier than the implementation date of this model: # ber 30, 2014. CDATE_min="2014103000" if [ "$CDATE" -lt "$CDATE_min" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Output from the specified external model (EXTRN_MDL_NAME) is not availa- ble for the specified cycle date and time (CDATE) because the latter is earlier than the implementation date of this model: EXTRN_MDL_NAME = \"${EXTRN_MDL_NAME}\" CDATE_min = \"${CDATE_min}\" - CDATE = \"${CDATE}\" -" + CDATE = \"${CDATE}\"" fi ;; @@ -164,12 +173,11 @@ elif [ "$ICS_OR_LBCS" = "LBCS" ]; then TIME_OFFSET_HRS="$EXTRN_MDL_LBCS_OFFSET_HRS" else valid_vals_ICS_OR_LBCS_str=$( printf "\"%s\" " "${valid_vals_ICS_OR_LBCS[@]}" ) - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Invalid value specified for ICS_OR_LBCS: ICS_OR_LBCS = \"$ICS_OR_LBCS\" Valid values are: - ${valid_vals_ICS_OR_LBCS_str} -" + ${valid_vals_ICS_OR_LBCS_str}" fi # #----------------------------------------------------------------------- @@ -191,10 +199,10 @@ EXTRN_MDL_FILES_DIR="${CYCLE_DIR}/${EXTRN_MDL_NAME}/${ICS_OR_LBCS}" # #----------------------------------------------------------------------- # -mkdir_vrfy -p "$EXTRN_MDL_FILES_DIR" -cd_vrfy $EXTRN_MDL_FILES_DIR || print_err_msg_exit "${script_name}" "\ +mkdir_vrfy -p "${EXTRN_MDL_FILES_DIR}" +cd_vrfy ${EXTRN_MDL_FILES_DIR} || print_err_msg_exit "\ Could not change directory to EXTRN_MDL_FILES_DIR: - EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\"" + EXTRN_MDL_FILES_DIR = 
\"${EXTRN_MDL_FILES_DIR}\"" # #----------------------------------------------------------------------- # @@ -237,8 +245,8 @@ $SCRIPTSDIR/exregional_get_extrn_files.sh \ EXTRN_MDL_ARCV_FPS="${EXTRN_MDL_ARCV_FPS_str}" \ EXTRN_MDL_ARCV_FMT="${EXTRN_MDL_ARCV_FMT}" \ EXTRN_MDL_ARCVREL_DIR="${EXTRN_MDL_ARCVREL_DIR}" || \ - print_err_msg_exit "${script_name}" "\ -Call to ex-script corresponding to J-job \"${script_name}\" failed." +print_err_msg_exit "\ +Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." # #----------------------------------------------------------------------- # @@ -246,9 +254,10 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== -Exiting script: \"${script_name}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_GRID b/jobs/JREGIONAL_MAKE_GRID index a8f724c3f2..718d4af58b 100755 --- a/jobs/JREGIONAL_MAKE_GRID +++ b/jobs/JREGIONAL_MAKE_GRID @@ -33,7 +33,7 @@ # wide. Thus, the halo in the grid file that the grid_gen_scr # script generates must be greater than 4 since otherwise, the # shave steps would shave off cells from within the interior of -# tile 7. We will let nhw_T7 denote the width of the halo in the +# tile 7. We will let NHW denote the width of the halo in the # grid file generated by grid_gen_scr. The "n" in this variable # name denotes number of cells, the "h" is used to indicate that # it refers to a halo region, the "w" is used to indicate that it @@ -48,7 +48,7 @@ # # a) This script generates an orography file only on tile 7. 
# -# b) This orography file contains a halo of the same width (nhw_T7) +# b) This orography file contains a halo of the same width (NHW) # as the grid file for tile 7 generated by the grid_gen_scr script # in the previous step. # @@ -59,9 +59,9 @@ # in the temporary directory defined in WORKDIR_FLTR. Note that: # # a) The filtered orography file generated by this script contains a -# halo of the same width (nhw_T7) as the (unfiltered) orography -# file generated by script orog_gen_scr (and the grid file genera- -# ted by grid_gen_scr). +# halo of the same width (NHW) as the (unfiltered) orography file +# generated by script orog_gen_scr (and the grid file generated by +# grid_gen_scr). # # b) In analogy with the input grid files, the FV3SAR model needs as # input two (filtered) orography files -- one with no halo cells @@ -74,8 +74,8 @@ # This "shave" executable is called 4 times to generate 4 files from # the tile 7 grid file generated by grid_gen_scr and the tile 7 fil- # tered orography file generated by orog_fltr_scr (both of which have -# a halo of width nhw_T7 cells). The 4 output files are placed in -# the temporary directory defined in WORKDIR_SHVE. More specifically: +# a halo of width NHW cells). The 4 output files are placed in the +# temporary directory defined in WORKDIR_SHVE. More specifically: # # a) shave_exec is called to shave the halo in the tile 7 grid file # generated by grid_gen_scr down to a width of 3 cells and store @@ -101,13 +101,12 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. 
$USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -116,19 +115,31 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + This is the J-job script for the task that generates grid files. ========================================================================" # @@ -138,8 +149,8 @@ This is the J-job script for the task that generates grid files. 
# #----------------------------------------------------------------------- # -export gtype -export stretch_fac +export gtype="$GTYPE" +export stretch_fac=${STRETCH_FAC} # #----------------------------------------------------------------------- # @@ -150,8 +161,8 @@ export stretch_fac # ${SCRIPTSDIR}/exregional_make_grid.sh \ WORKDIR_LOCAL="ABCD" || \ - print_err_msg_exit "${script_name}" "\ -Call to ex-script corresponding to J-job \"${script_name}\" failed." +print_err_msg_exit "\ +Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." # #----------------------------------------------------------------------- # @@ -183,9 +194,10 @@ touch "$LOGDIR/make_grid_task_complete.txt" # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== -Exiting script: \"${script_name}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_IC_LBC0 b/jobs/JREGIONAL_MAKE_ICS similarity index 80% rename from jobs/JREGIONAL_MAKE_IC_LBC0 rename to jobs/JREGIONAL_MAKE_ICS index dd9ef4eb6a..77bf2a5325 100755 --- a/jobs/JREGIONAL_MAKE_IC_LBC0 +++ b/jobs/JREGIONAL_MAKE_ICS @@ -3,13 +3,12 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. 
$USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -31,17 +30,29 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + This is the J-job script for the task that generates initial condition -(IC), surface, and zeroth hour lateral boundary condition (LBC0) files +(IC), surface, and zeroth-hour lateral boundary condition (LBC0) files for the FV3 (in NetCDF format). 
========================================================================" # @@ -107,35 +118,9 @@ case "$MACHINE" in ;; # "HERA") -# - { save_shell_opts; set +x; } > /dev/null 2>&1 - ulimit -s unlimited # ulimit -a - - module purge - module load intel/18.0.5.274 - module load impi/2018.0.4 - module load netcdf/4.6.1 - - module use /scratch1/NCEPDEV/nems/emc.nemspara/soft/modulefiles - module load esmf/8.0.0bs21 - - module use -a /scratch2/NCEPDEV/nwprod/NCEPLIBS/modulefiles - module load w3nco - module load nemsio/2.2.3 - module load bacio - module load sp - module load sfcio - module load sigio - - module load wgrib2 - - module list - APRUN="srun" - - { restore_shell_opts; } > /dev/null 2>&1 ;; # "JET") @@ -183,10 +168,9 @@ get_extrn_mdl_file_dir_info \ # #----------------------------------------------------------------------- # -WGRIB2_DIR=$( which wgrib2 ) || print_err_msg_exit "${script_name}" "\ +WGRIB2_DIR=$( which wgrib2 ) || print_err_msg_exit "\ Directory in which the wgrib2 executable is located not found: - WGRIB2_DIR = \"${WGRIB2_DIR}\" -" + WGRIB2_DIR = \"${WGRIB2_DIR}\"" # #----------------------------------------------------------------------- # @@ -205,15 +189,15 @@ EXTRN_MDL_FILES_DIR="${CYCLE_DIR}/${EXTRN_MDL_NAME_ICS}/ICS" # EXTRN_MDL_FNS_str="( "$( printf "\"%s\" " "${EXTRN_MDL_FNS[@]}" )")" -$SCRIPTSDIR/exregional_make_ic_lbc0.sh \ +$SCRIPTSDIR/exregional_make_ics.sh \ EXTRN_MDL_FNS="${EXTRN_MDL_FNS_str}" \ EXTRN_MDL_FILES_DIR="${EXTRN_MDL_FILES_DIR}" \ EXTRN_MDL_CDATE="${EXTRN_MDL_CDATE}" \ ICS_DIR="${ICS_DIR}" \ WGRIB2_DIR="${WGRIB2_DIR}" \ APRUN="${APRUN}" || \ - print_err_msg_exit "${script_name}" "\ -Call to ex-script corresponding to J-job \"${script_name}\" failed." +print_err_msg_exit "\ +Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." # #----------------------------------------------------------------------- # @@ -221,9 +205,10 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." 
# #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== -Exiting script: \"${script_name}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN b/jobs/JREGIONAL_MAKE_LBCS similarity index 82% rename from jobs/JREGIONAL_MAKE_LBC1_TO_LBCN rename to jobs/JREGIONAL_MAKE_LBCS index 4e3a490f52..678bd669fc 100755 --- a/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN +++ b/jobs/JREGIONAL_MAKE_LBCS @@ -3,13 +3,12 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -31,15 +30,27 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). 
# #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + This is the J-job script for the task that generates lateral boundary condition (LBC) files (in NetCDF format) for all LBC update hours (ex- cept hour zero). @@ -107,35 +118,9 @@ case "$MACHINE" in ;; # "HERA") -# - { save_shell_opts; set +x; } > /dev/null 2>&1 - ulimit -s unlimited # ulimit -a - - module purge - module load intel/18.0.5.274 - module load impi/2018.0.4 - module load netcdf/4.6.1 - - module use /scratch1/NCEPDEV/nems/emc.nemspara/soft/modulefiles - module load esmf/8.0.0bs21 - - module use -a /scratch2/NCEPDEV/nwprod/NCEPLIBS/modulefiles - module load w3nco - module load nemsio/2.2.3 - module load bacio - module load sp - module load sfcio - module load sigio - - module load wgrib2 - - module list - APRUN="srun" - - { restore_shell_opts; } > /dev/null 2>&1 ;; # @@ -184,10 +169,9 @@ get_extrn_mdl_file_dir_info \ # #----------------------------------------------------------------------- # -WGRIB2_DIR=$( which wgrib2 ) || print_err_msg_exit "${script_name}" "\ +WGRIB2_DIR=$( which wgrib2 ) || print_err_msg_exit "\ Directory in which the wgrib2 executable is located not found: - WGRIB2_DIR = \"${WGRIB2_DIR}\" -" + WGRIB2_DIR = \"${WGRIB2_DIR}\"" # #----------------------------------------------------------------------- # @@ -207,7 +191,7 @@ 
EXTRN_MDL_FILES_DIR="${CYCLE_DIR}/${EXTRN_MDL_NAME_LBCS}/LBCS" EXTRN_MDL_FNS_str="( "$( printf "\"%s\" " "${EXTRN_MDL_FNS[@]}" )")" EXTRN_MDL_LBC_UPDATE_FHRS_str="( "$( printf "\"%s\" " "${EXTRN_MDL_LBC_UPDATE_FHRS[@]}" )")" -$SCRIPTSDIR/exregional_make_lbc1_to_lbcn.sh \ +$SCRIPTSDIR/exregional_make_lbcs.sh \ EXTRN_MDL_FNS="${EXTRN_MDL_FNS_str}" \ EXTRN_MDL_FILES_DIR="${EXTRN_MDL_FILES_DIR}" \ EXTRN_MDL_CDATE="${EXTRN_MDL_CDATE}" \ @@ -215,8 +199,8 @@ $SCRIPTSDIR/exregional_make_lbc1_to_lbcn.sh \ APRUN="${APRUN}" \ LBCS_DIR="${LBCS_DIR}" \ EXTRN_MDL_LBC_UPDATE_FHRS="${EXTRN_MDL_LBC_UPDATE_FHRS_str}" || \ - print_err_msg_exit "${script_name}" "\ -Call to ex-script corresponding to J-job \"${script_name}\" failed." +print_err_msg_exit "\ +Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." # #----------------------------------------------------------------------- # @@ -224,9 +208,10 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== -Exiting script: \"${script_name}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_OROG b/jobs/JREGIONAL_MAKE_OROG index 67651ff8e9..3e9fff3b7e 100755 --- a/jobs/JREGIONAL_MAKE_OROG +++ b/jobs/JREGIONAL_MAKE_OROG @@ -3,13 +3,12 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. 
$USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -18,19 +17,31 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + This is the J-job script for the task that generates orography files. ========================================================================" # @@ -43,8 +54,8 @@ This is the J-job script for the task that generates orography files. # ${SCRIPTSDIR}/exregional_make_orog.sh \ WORKDIR_LOCAL="ABCD" || \ - print_err_msg_exit "${script_name}" "\ -Call to ex-script corresponding to J-job \"${script_name}\" failed." +print_err_msg_exit "\ +Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." 
# #----------------------------------------------------------------------- # @@ -76,9 +87,10 @@ touch "$LOGDIR/make_orog_task_complete.txt" # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== -Exiting script: \"${script_name}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_SFC_CLIMO b/jobs/JREGIONAL_MAKE_SFC_CLIMO index d648381ee6..eec9f34888 100755 --- a/jobs/JREGIONAL_MAKE_SFC_CLIMO +++ b/jobs/JREGIONAL_MAKE_SFC_CLIMO @@ -3,13 +3,12 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -22,15 +21,27 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). 
# #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + This is the J-job script for the task that generates surface fields from climatology. ========================================================================" @@ -65,8 +76,8 @@ mkdir_vrfy $workdir # ${SCRIPTSDIR}/exregional_make_sfc_climo.sh \ workdir="$workdir" || \ - print_err_msg_exit "${script_name}" "\ -Call to ex-script corresponding to J-job \"${script_name}\" failed." +print_err_msg_exit "\ +Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." 
# #----------------------------------------------------------------------- # @@ -98,9 +109,10 @@ touch "$LOGDIR/make_sfc_climo_task_complete.txt" # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== -Exiting script: \"${script_name}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_FV3 b/jobs/JREGIONAL_RUN_FCST similarity index 69% rename from jobs/JREGIONAL_RUN_FV3 rename to jobs/JREGIONAL_RUN_FCST index 92379be94c..39e5c17b23 100755 --- a/jobs/JREGIONAL_RUN_FV3 +++ b/jobs/JREGIONAL_RUN_FCST @@ -13,13 +13,12 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -28,19 +27,31 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). 
# #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + This is the J-job script for the task that runs a forecast with FV3 for the specified cycle. ========================================================================" @@ -61,10 +72,10 @@ mkdir_vrfy -p ${CYCLE_DIR}/RESTART # #----------------------------------------------------------------------- # -$SCRIPTSDIR/exregional_run_fv3.sh \ +$SCRIPTSDIR/exregional_run_fcst.sh \ CYCLE_DIR="${CYCLE_DIR}" || \ - print_err_msg_exit "${script_name}" "\ -Call to ex-script corresponding to J-job \"${script_name}\" failed." +print_err_msg_exit "\ +Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." # #----------------------------------------------------------------------- # @@ -72,9 +83,10 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." 
# #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== -Exiting script: \"${script_name}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_POST b/jobs/JREGIONAL_RUN_POST index 1bfd0f918e..cde0dd7269 100755 --- a/jobs/JREGIONAL_RUN_POST +++ b/jobs/JREGIONAL_RUN_POST @@ -12,13 +12,12 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -31,15 +30,27 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. 
+# +#----------------------------------------------------------------------- +# +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + This is the J-job script for the task that runs the post-processor (UPP) on the output files corresponding to a specified forecast hour. ========================================================================" @@ -87,8 +98,8 @@ $SCRIPTSDIR/exregional_run_post.sh \ postprd_dir="${postprd_dir}" \ fhr_dir="${fhr_dir}" \ fhr="${fhr}" || \ - print_err_msg_exit "${script_name}" "\ -Call to ex-script corresponding to J-job \"${script_name}\" failed." +print_err_msg_exit "\ +Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." # #----------------------------------------------------------------------- # @@ -96,9 +107,10 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== -Exiting script: \"${script_name}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/modulefiles/regional_workflow/global_equiv_resol.cheyenne b/modulefiles/codes/cheyenne/global_equiv_resol similarity index 100% rename from modulefiles/regional_workflow/global_equiv_resol.cheyenne rename to modulefiles/codes/cheyenne/global_equiv_resol diff --git a/modulefiles/regional_workflow/mosaic_file.cheyenne b/modulefiles/codes/cheyenne/mosaic_file similarity index 100% rename from modulefiles/regional_workflow/mosaic_file.cheyenne rename to modulefiles/codes/cheyenne/mosaic_file diff --git 
a/modulefiles/regional_workflow/regional_grid.cheyenne b/modulefiles/codes/cheyenne/regional_grid similarity index 100% rename from modulefiles/regional_workflow/regional_grid.cheyenne rename to modulefiles/codes/cheyenne/regional_grid diff --git a/modulefiles/regional_workflow/global_equiv_resol.hera b/modulefiles/codes/hera/global_equiv_resol similarity index 100% rename from modulefiles/regional_workflow/global_equiv_resol.hera rename to modulefiles/codes/hera/global_equiv_resol diff --git a/modulefiles/regional_workflow/mosaic_file.hera b/modulefiles/codes/hera/mosaic_file similarity index 100% rename from modulefiles/regional_workflow/mosaic_file.hera rename to modulefiles/codes/hera/mosaic_file diff --git a/modulefiles/regional_workflow/regional_grid.hera b/modulefiles/codes/hera/regional_grid similarity index 100% rename from modulefiles/regional_workflow/regional_grid.hera rename to modulefiles/codes/hera/regional_grid diff --git a/modulefiles/regional_workflow/enkf_chgres_recenter.theia b/modulefiles/regional_workflow/enkf_chgres_recenter.theia deleted file mode 100644 index 369c013f33..0000000000 --- a/modulefiles/regional_workflow/enkf_chgres_recenter.theia +++ /dev/null @@ -1,14 +0,0 @@ -#%Module##################################################### -## enkf_chgres_recenter component - theia -############################################################# - -module load intel/16.1.150 - -module use -a /scratch3/NCEPDEV/nwprod/lib/modulefiles -module load nemsio/v2.2.3 -module load bacio/v2.0.2 -module load w3nco/v2.0.6 -module load ip/v3.0.0 -module load sp/v2.0.2 - -export FC=ifort diff --git a/modulefiles/regional_workflow/enkf_chgres_recenter.wcoss b/modulefiles/regional_workflow/enkf_chgres_recenter.wcoss deleted file mode 100644 index c341291800..0000000000 --- a/modulefiles/regional_workflow/enkf_chgres_recenter.wcoss +++ /dev/null @@ -1,13 +0,0 @@ -#%Module##################################################### -## enkf_chgres_recenter component - 
wcoss -############################################################# - -module unload ics -module load ics/16.0.3 -module load nemsio/v2.2.3 -module load bacio/v2.0.2 -module load w3nco/v2.0.6 -module load ip/v3.0.0 -module load sp/v2.0.2 - -export FC=ifort diff --git a/modulefiles/regional_workflow/enkf_chgres_recenter.wcoss_cray b/modulefiles/regional_workflow/enkf_chgres_recenter.wcoss_cray deleted file mode 100644 index 18ad66ada8..0000000000 --- a/modulefiles/regional_workflow/enkf_chgres_recenter.wcoss_cray +++ /dev/null @@ -1,19 +0,0 @@ -#%Module##################################################### -## enkf_chgres_recenter component - wcoss_cray -############################################################# - -module load ncep/1.0 -module load PrgEnv-intel/5.2.56 -module rm intel -module load intel/16.3.210 -module load cray-mpich/7.2.0 -module load craype-haswell -module load cray-netcdf - -module load nemsio-intel/2.2.3 -module load bacio-intel/2.0.2 -module load w3nco-intel/2.0.6 -module load ip-intel/3.0.0 -module load sp-intel/2.0.2 - -export FC=ftn diff --git a/modulefiles/regional_workflow/enkf_chgres_recenter.wcoss_cray_userlib b/modulefiles/regional_workflow/enkf_chgres_recenter.wcoss_cray_userlib deleted file mode 100644 index 9a7bd7cd60..0000000000 --- a/modulefiles/regional_workflow/enkf_chgres_recenter.wcoss_cray_userlib +++ /dev/null @@ -1,21 +0,0 @@ -#%Module##################################################### -## enkf_chgres_recenter component - wcoss_cray -############################################################# - -module load ncep/1.0 -module load PrgEnv-intel/5.2.56 -module rm intel -module load intel/16.3.210 -module load cray-mpich/7.2.0 -module load craype-haswell -module load cray-netcdf - -module unuse /gpfs/hps/nco/ops/nwprod/lib/modulefiles -module use -a $MOD_PATH -module load nemsio-intel/2.2.3 -module load bacio-intel/2.0.2 -module load w3nco-intel/2.0.6 -module load ip-intel/3.0.0 -module load sp-intel/2.0.2 - -export 
FC=ftn diff --git a/modulefiles/regional_workflow/enkf_chgres_recenter.wcoss_dell_p3 b/modulefiles/regional_workflow/enkf_chgres_recenter.wcoss_dell_p3 deleted file mode 100644 index 668853dca5..0000000000 --- a/modulefiles/regional_workflow/enkf_chgres_recenter.wcoss_dell_p3 +++ /dev/null @@ -1,15 +0,0 @@ -#%Module##################################################### -## enkf_chgres_recenter component - wcoss_dell_p3 -############################################################# - -module load ips/18.0.1.163 -module load impi/18.0.1 - -module load NetCDF/4.5.0 -module load nemsio/2.2.3 -module load bacio/2.0.2 -module load w3nco/2.0.6 -module load ip/3.0.1 -module load sp/2.0.2 - -export FC=ifort diff --git a/modulefiles/regional_workflow/fre-nctools.cheyenne b/modulefiles/regional_workflow/fre-nctools.cheyenne deleted file mode 100644 index 9a7bcb2028..0000000000 --- a/modulefiles/regional_workflow/fre-nctools.cheyenne +++ /dev/null @@ -1,11 +0,0 @@ -#%Module##################################################### -## Module file for fre-nctools for NCAR/UCAR Cheyenne machine -############################################################# -module purge -module load ncarenv/1.2 -module load intel/18.0.1 -module load ncarcompilers/0.4.1 -module load impi/2018.1.163 -module load netcdf/4.6.1 -# No hdf5 loaded since netcdf and hdf5 reside together on cheyenne - diff --git a/modulefiles/regional_workflow/fre-nctools.gaea b/modulefiles/regional_workflow/fre-nctools.gaea deleted file mode 100644 index edce663573..0000000000 --- a/modulefiles/regional_workflow/fre-nctools.gaea +++ /dev/null @@ -1,10 +0,0 @@ -#%Module##################################################### -## Module file for fre-nctools -############################################################# - module rm PrgEnv-pgi PrgEnv-pathscale PrgEnv-intel PrgEnv-cray PrgEnv-gnu - module load PrgEnv-intel - module swap intel intel/15.0.2.164 - module load cray-hdf5 - module load cray-netcdf - #setenv LIBRARY_PATH 
${LIBRARY_PATH}:${NETCDF_DIR}/lib:${HDF5}/lib - diff --git a/modulefiles/regional_workflow/fre-nctools.jet b/modulefiles/regional_workflow/fre-nctools.jet deleted file mode 100644 index 37c1fdc737..0000000000 --- a/modulefiles/regional_workflow/fre-nctools.jet +++ /dev/null @@ -1,28 +0,0 @@ -#%Module###################################################################### - #module load intel - #module load mvapich2 - #module load netcdf - #module load szip - #module load hdf5 - module purge - module load intel/18.0.5.274 - module load impi/2018.4.274 - #module load intel/19.0.1.144 - #module load impi/2019.1.144 - module load szip/2.1 - module load hdf5/1.8.9 - module load netcdf/4.2.1.1 - #module load slurm/18.08.6 - #module load contrib anaconda - - - #module load intel/15.0.3.187 - #module load impi/5.0.3.048 - #module load szip - #module load hdf5 - #module load netcdf4/4.2.1.1 - - #export HDF5_DIR=$HDF5 - #export NETCDF_DIR=$NETCDF4 - #export HDF5_HOME=$HDF5 - #export NETCDF_HOME=$NETCDF4 diff --git a/modulefiles/regional_workflow/fre-nctools.theia b/modulefiles/regional_workflow/fre-nctools.theia deleted file mode 100644 index c1b6784111..0000000000 --- a/modulefiles/regional_workflow/fre-nctools.theia +++ /dev/null @@ -1,7 +0,0 @@ -#%Module##################################################### -## Module file for fre-nctools -############################################################# -module load intel/14.0.2 -module load impi/4.1.3.048 -module load netcdf/4.3.0 -module load hdf5/1.8.14 diff --git a/modulefiles/regional_workflow/fre-nctools.wcoss_cray b/modulefiles/regional_workflow/fre-nctools.wcoss_cray deleted file mode 100755 index c143b3a35e..0000000000 --- a/modulefiles/regional_workflow/fre-nctools.wcoss_cray +++ /dev/null @@ -1,9 +0,0 @@ -#%Module##################################################### -## Module file for fre-nctools -############################################################# -module load PrgEnv-intel -module rm intel -module load 
intel/16.3.210 -module load craype-haswell -module load cray-netcdf -module load cray-hdf5 diff --git a/modulefiles/regional_workflow/fre-nctools.wcoss_dell_p3 b/modulefiles/regional_workflow/fre-nctools.wcoss_dell_p3 deleted file mode 100755 index 75a933a350..0000000000 --- a/modulefiles/regional_workflow/fre-nctools.wcoss_dell_p3 +++ /dev/null @@ -1,9 +0,0 @@ -#%Module##################################################### -## Module file for fre-nctools, wcoss_dell_p3 -############################################################# -# -module load ips/18.0.1.163 -module load impi/18.0.1 - -module load NetCDF/4.5.0 -module load HDF5-serial/1.10.1 diff --git a/modulefiles/regional_workflow/gaussian_sfcanl.theia b/modulefiles/regional_workflow/gaussian_sfcanl.theia deleted file mode 100644 index d70de3df8f..0000000000 --- a/modulefiles/regional_workflow/gaussian_sfcanl.theia +++ /dev/null @@ -1,19 +0,0 @@ -#%Module##################################################### -## gaussian_sfcanl build module for Theia -############################################################# - -# Loading Intel Compiler Suite -module load intel/15.1.133 - -# Loding nceplibs modules -module use -a /scratch3/NCEPDEV/nwprod/lib/modulefiles -module load w3nco/v2.0.6 -module load bacio/v2.0.2 -module load nemsio/v2.2.3 -module load sp/v2.0.2 -module load netcdf/4.3.0 -module load hdf5/1.8.14 -export NETCDF_INCLUDE="-I${NETCDF}/include" -export NETCDF_LDFLAGS_F="-L${NETCDF}/lib -lnetcdf -lnetcdff -L${HDF5}/lib -lhdf5 -lhdf5_fortran" - -export FCOMP=ifort diff --git a/modulefiles/regional_workflow/gaussian_sfcanl.wcoss b/modulefiles/regional_workflow/gaussian_sfcanl.wcoss deleted file mode 100644 index 8e89eff6f4..0000000000 --- a/modulefiles/regional_workflow/gaussian_sfcanl.wcoss +++ /dev/null @@ -1,15 +0,0 @@ -#%Module##################################################### -## gaussian_sfcanl build module - wcoss -############################################################# - -# Loading Intel 
Compiler Suite -module load ics/16.0.3 - -# Loading nceplibs modules -module load w3nco/v2.0.6 -module load bacio/v2.0.2 -module load nemsio/v2.2.3 -module load sp/v2.0.2 -module load NetCDF/4.2/serial - -export FCOMP=ifort diff --git a/modulefiles/regional_workflow/gaussian_sfcanl.wcoss_cray b/modulefiles/regional_workflow/gaussian_sfcanl.wcoss_cray deleted file mode 100644 index e20c863d8e..0000000000 --- a/modulefiles/regional_workflow/gaussian_sfcanl.wcoss_cray +++ /dev/null @@ -1,20 +0,0 @@ -#%Module##################################################### -## gaussian_sfcanl component of fv3gfs - cray -############################################################# - -set ver v15.0.0 - -# Load Intel environment -module load PrgEnv-intel/5.2.56 -module rm intel -module rm NetCDF-intel-sandybridge/4.2 -module load intel/16.3.210 -module load cray-netcdf -module load craype-haswell - -module load nemsio-intel/2.2.3 -module load w3nco-intel/2.0.6 -module load bacio-intel/2.0.2 -module load sp-intel/2.0.2 - -export FCOMP=ftn diff --git a/modulefiles/regional_workflow/gaussian_sfcanl.wcoss_dell_p3 b/modulefiles/regional_workflow/gaussian_sfcanl.wcoss_dell_p3 deleted file mode 100644 index e16660ae5e..0000000000 --- a/modulefiles/regional_workflow/gaussian_sfcanl.wcoss_dell_p3 +++ /dev/null @@ -1,18 +0,0 @@ -#%Module##################################################### -## gaussian_sfcanl component of fv3gfs - wcoss_dell_p3 -############################################################# - -set ver v15.0.0 - -# Load Intel environment -module load ips/18.0.1.163 -module load impi/18.0.1 - -module load NetCDF/4.5.0 -module load HDF5-serial/1.10.1 -module load nemsio/2.2.3 -module load w3nco/2.0.6 -module load bacio/2.0.2 -module load sp/2.0.2 - -export FCOMP=ifort diff --git a/modulefiles/regional_workflow/global_cycle.theia b/modulefiles/regional_workflow/global_cycle.theia deleted file mode 100644 index 2f0f82cec8..0000000000 --- 
a/modulefiles/regional_workflow/global_cycle.theia +++ /dev/null @@ -1,20 +0,0 @@ -#%Module##################################################### -## global_cycle component - theia -############################################################# - -# Loading Intel Compiler Suite -module load intel/16.1.150 -module load impi - -# Loding nceplibs modules -module use -a /scratch3/NCEPDEV/nwprod/lib/modulefiles -module load w3nco/v2.0.6 -module load sp/v2.0.2 -module load bacio/v2.0.1 -module load ip/v3.0.0 -module load netcdf/4.3.0 -module load hdf5/1.8.14 -export NETCDF_INCLUDE="-I${NETCDF}/include" -export NETCDF_LDFLAGS_F="-L${NETCDF}/lib -lnetcdf -lnetcdff -L${HDF5}/lib -lhdf5 -lhdf5_fortran" - -export FCMP=mpiifort diff --git a/modulefiles/regional_workflow/global_cycle.wcoss b/modulefiles/regional_workflow/global_cycle.wcoss deleted file mode 100644 index fbab1cc947..0000000000 --- a/modulefiles/regional_workflow/global_cycle.wcoss +++ /dev/null @@ -1,16 +0,0 @@ -#%Module##################################################### -## global_cycle component - wcoss -############################################################# - -# Loading Intel Compiler Suite -module load ics/16.0.3 -module load ibmpe/1.3.0.12 - -# Loading nceplibs modules -module load w3nco/v2.0.6 -module load sp/v2.0.2 -module load bacio/v2.0.2 -module load ip/v3.0.0 -module load NetCDF/4.2/serial - -export FCMP=mpfort diff --git a/modulefiles/regional_workflow/global_cycle.wcoss_cray b/modulefiles/regional_workflow/global_cycle.wcoss_cray deleted file mode 100644 index e1a4ed9d11..0000000000 --- a/modulefiles/regional_workflow/global_cycle.wcoss_cray +++ /dev/null @@ -1,21 +0,0 @@ -#%Module##################################################### -## global_cycle component - wcoss_cray -############################################################# -# Load ncep environment -module load ncep/1.0 - -# Load Intel environment -module load PrgEnv-intel/5.2.56 -module rm intel -module load intel/16.3.210 -module 
load cray-mpich/7.2.0 -module load craype-haswell -module load cray-netcdf - -# Load NCEPLIBS modules -module load w3nco-intel/2.0.6 -module load sp-intel/2.0.2 -module load ip-intel/3.0.0 -module load bacio-intel/2.0.1 - -export FCMP=ftn diff --git a/modulefiles/regional_workflow/global_cycle.wcoss_cray_userlib b/modulefiles/regional_workflow/global_cycle.wcoss_cray_userlib deleted file mode 100644 index de700b6de8..0000000000 --- a/modulefiles/regional_workflow/global_cycle.wcoss_cray_userlib +++ /dev/null @@ -1,24 +0,0 @@ -#%Module##################################################### -## global_cycle component - wcoss_cray -############################################################# -module purge -# Load ncep environment -module load ncep/1.0 - -# Load Intel environment -module load PrgEnv-intel/5.2.56 -module rm intel -module load intel/16.3.210 -module load cray-mpich/7.2.0 -module load craype-haswell -module load cray-netcdf - -# Load NCEPLIBS modules -module unuse /gpfs/hps/nco/ops/nwprod/lib/modulefiles -module use -a $MOD_PATH -module load w3nco-intel/2.0.6 -module load sp-intel/2.0.2 -module load ip-intel/3.0.0 -module load bacio-intel/2.0.1 - -export FCMP=ftn diff --git a/modulefiles/regional_workflow/global_cycle.wcoss_dell_p3 b/modulefiles/regional_workflow/global_cycle.wcoss_dell_p3 deleted file mode 100644 index 3e1ff1e9a3..0000000000 --- a/modulefiles/regional_workflow/global_cycle.wcoss_dell_p3 +++ /dev/null @@ -1,15 +0,0 @@ -#%Module##################################################### -## global_cycle component - wcoss_dell_p3 -############################################################# - -module load ips/18.0.1.163 -module load impi/18.0.1 - -module load NetCDF/4.5.0 -module load w3nco/2.0.6 -module load sp/2.0.2 -module load ip/3.0.1 -module load bacio/2.0.2 - -export FCMP=mpif90 - diff --git a/modulefiles/regional_workflow/global_equiv_resol.theia b/modulefiles/regional_workflow/global_equiv_resol.theia deleted file mode 100644 index 
6297ee3e47..0000000000 --- a/modulefiles/regional_workflow/global_equiv_resol.theia +++ /dev/null @@ -1,8 +0,0 @@ -#%Module##################################################### -## Module file for regional_grid -############################################################# -module purge -module load intel/18.1.163 -module load netcdf/4.6.1 -module load hdf5/1.10.4 - diff --git a/modulefiles/regional_workflow/orog.cheyenne b/modulefiles/regional_workflow/orog.cheyenne deleted file mode 100644 index b4450e00c8..0000000000 --- a/modulefiles/regional_workflow/orog.cheyenne +++ /dev/null @@ -1,9 +0,0 @@ -#%Module##################################################### -## Module file for orog on NCAR/UCAR Cheyenne -############################################################# -# Loading Intel Compiler Suite -module load intel/18.0.1 -module load netcdf/4.6.1 - -# NCEP libraries are pre-built for now - diff --git a/modulefiles/regional_workflow/orog.hera b/modulefiles/regional_workflow/orog.hera deleted file mode 100644 index 130fcb9357..0000000000 --- a/modulefiles/regional_workflow/orog.hera +++ /dev/null @@ -1,15 +0,0 @@ -#%Module##################################################### -## Module file for orog -############################################################# -# Loading Intel Compiler Suite -module purge -module load intel/18.0.5.274 -module load netcdf/4.6.1 - -# Loading nceplibs modules -module use -a $MOD_PATH -module load ip/v2.0.0 -module load sp/v2.0.2 -module load w3emc/v2.2.0 -module load w3nco/v2.0.6 -module load bacio/v2.0.2 diff --git a/modulefiles/regional_workflow/orog.jet b/modulefiles/regional_workflow/orog.jet deleted file mode 100644 index c55b1d4e2d..0000000000 --- a/modulefiles/regional_workflow/orog.jet +++ /dev/null @@ -1,33 +0,0 @@ -#%Module##################################################### -## Module file for orog -############################################################# -# Loading Intel Compiler Suite -#module unload intel 
-#module load intel/15.0.3.187 - -#module load impi/5.1.1.109 -#module load szip -#module load hdf5 -#module load netcdf4/4.2.1.1 - - module load intel/18.0.5.274 - module load impi/2018.4.274 - module load szip/2.1 - module load hdf5/1.8.9 - module load netcdf/4.2.1.1 - #module load slurm/18.08.6 - # module load contrib anaconda - - -# Loding nceplibs modules -# -export NCEPLIBS=/mnt/lfs3/projects/hfv3gfs/gwv/ljtjet/lib -module use /mnt/lfs3/projects/hfv3gfs/gwv/ljtjet/lib/modulefiles -#module use -a $MOD_PATH - -module load ip-intel-sandybridge/2.0.0 -module load sp-intel-sandybridge/2.0.2 -module load w3nco-intel-sandybridge/2.0.6 -module load w3emc-intel-sandybridge/2.2.0 -module load bacio-intel-sandybridge/2.0.2 - diff --git a/modulefiles/regional_workflow/orog.odin b/modulefiles/regional_workflow/orog.odin deleted file mode 100644 index b47d6c6852..0000000000 --- a/modulefiles/regional_workflow/orog.odin +++ /dev/null @@ -1,33 +0,0 @@ -#%Module##################################################### -############################################################# -## Fanglin.Yang@noaa.gov -## NOAA/NWS/NCEP/EMC -############################################################# -#proc ModulesHelp { } { -#puts stderr "Set environment veriables for orog\n" -#} -#module-whatis "orog" - -module use /oldscratch/ywang/external/modulefiles - -# Load ncep environment -#module load ncep/1.0 - -# Load Intel environment -#module load PrgEnv-intel/5.2.56 -#module rm intel -#module load intel/16.3.210 - -#module load cray-mpich/7.2.0 -#module load craype-haswell -#module load cray-netcdf - -# Load NCEPLIBS modules -module load w3emc/v2.3.0 -module load ip/v3.0.0 -module load sp/v2.0.2 -module load w3nco/v2.0.6 -module load bacio/v2.0.2 - -export FCMP=ftn -export CCMP=cc diff --git a/modulefiles/regional_workflow/orog.theia b/modulefiles/regional_workflow/orog.theia deleted file mode 100644 index 50eb7e7963..0000000000 --- a/modulefiles/regional_workflow/orog.theia +++ /dev/null @@ 
-1,16 +0,0 @@ -#%Module##################################################### -## Module file for orog -############################################################# -# Loading Intel Compiler Suite -module unload intel -module load intel/16.1.150 - -module load netcdf/4.3.0 - -# Loading nceplibs modules -module use -a $MOD_PATH -module load ip/v2.0.0 -module load sp/v2.0.2 -module load w3emc/v2.2.0 -module load w3nco/v2.0.6 -module load bacio/v2.0.2 diff --git a/modulefiles/regional_workflow/orog.wcoss b/modulefiles/regional_workflow/orog.wcoss deleted file mode 100644 index 9aa30c5b23..0000000000 --- a/modulefiles/regional_workflow/orog.wcoss +++ /dev/null @@ -1,16 +0,0 @@ -#%Module##################################################### -## Module file for orog -############################################################# - -# Loading Intel Compiler Suite -module load ics/16.0.3 - -# Loding nceplibs modules -module load w3nco/v2.0.6 -module load w3emc/v2.2.0 -module load sp/v2.0.2 -module load ip/v2.0.0 -module load bacio/v2.0.2 -module load NetCDF - -export FCMP=ifort diff --git a/modulefiles/regional_workflow/orog.wcoss_cray b/modulefiles/regional_workflow/orog.wcoss_cray deleted file mode 100644 index 51cb66a186..0000000000 --- a/modulefiles/regional_workflow/orog.wcoss_cray +++ /dev/null @@ -1,18 +0,0 @@ -#%Module##################################################### -## Module file for orog -############################################################# -module load ncep/1.0 -module load PrgEnv-intel/5.2.56 -module rm intel -module load intel/16.3.210 -module load cray-mpich/7.2.0 -module load craype-haswell -module load cray-netcdf - -module load w3emc-intel/2.2.0 -module load ip-intel/2.0.0 -module load sp-intel/2.0.2 -module load w3nco-intel/2.0.6 -module load bacio-intel/2.0.1 - -export FCMP=ftn diff --git a/modulefiles/regional_workflow/orog.wcoss_cray_userlib b/modulefiles/regional_workflow/orog.wcoss_cray_userlib deleted file mode 100644 index 
4698b9cb37..0000000000 --- a/modulefiles/regional_workflow/orog.wcoss_cray_userlib +++ /dev/null @@ -1,20 +0,0 @@ -#%Module##################################################### -## Module file for orog -############################################################# -module load ncep/1.0 -module load PrgEnv-intel/5.2.56 -module rm intel -module load intel/16.3.210 -module load cray-mpich/7.2.0 -module load craype-haswell -module load cray-netcdf - -module unuse /gpfs/hps/nco/ops/nwprod/lib/modulefiles -module use -a $MOD_PATH -module load ip/v2.0.0 -module load sp/v2.0.2 -module load w3emc/v2.2.0 -module load w3nco/v2.0.6 -module load bacio/v2.0.2 - -export FCMP=ftn diff --git a/modulefiles/regional_workflow/orog.wcoss_dell_p3 b/modulefiles/regional_workflow/orog.wcoss_dell_p3 deleted file mode 100644 index adde9c2aa5..0000000000 --- a/modulefiles/regional_workflow/orog.wcoss_dell_p3 +++ /dev/null @@ -1,14 +0,0 @@ -#%Module##################################################### -## Module file for orog, wcoss_dell_p3 -############################################################# -module load ips/18.0.1.163 -module load impi/18.0.1 - -module load NetCDF/4.5.0 -module load w3emc/2.3.0 -module load w3nco/2.0.6 -module load sp/2.0.2 -module load ip/3.0.1 -module load bacio/2.0.2 - -export FCMP=ifort diff --git a/modulefiles/regional_workflow/regional_grid.theia b/modulefiles/regional_workflow/regional_grid.theia deleted file mode 100644 index 6297ee3e47..0000000000 --- a/modulefiles/regional_workflow/regional_grid.theia +++ /dev/null @@ -1,8 +0,0 @@ -#%Module##################################################### -## Module file for regional_grid -############################################################# -module purge -module load intel/18.1.163 -module load netcdf/4.6.1 -module load hdf5/1.10.4 - diff --git a/modulefiles/regional_workflow/shave.odin b/modulefiles/regional_workflow/shave.odin deleted file mode 100644 index 01727bef46..0000000000 --- 
a/modulefiles/regional_workflow/shave.odin +++ /dev/null @@ -1,18 +0,0 @@ -#%Module##################################################### -############################################################# -## shave component - wcoss_cray -############################################################# -# Load ncep environment -#module load ncep/1.0 - -# Load Intel environment -#module load PrgEnv-intel/5.2.56 -#module rm intel -#module load intel/16.3.210 - -#module load cray-mpich/7.2.0 -#module load craype-haswell -#module load cray-netcdf - -export FCMP=ftn -export FFLAGS="-O0" diff --git a/modulefiles/regional_workflow/fre-nctools.odin b/modulefiles/tasks/hera/get_extrn_ics similarity index 60% rename from modulefiles/regional_workflow/fre-nctools.odin rename to modulefiles/tasks/hera/get_extrn_ics index 09d988049e..9935033fd2 100644 --- a/modulefiles/regional_workflow/fre-nctools.odin +++ b/modulefiles/tasks/hera/get_extrn_ics @@ -1,6 +1,8 @@ #%Module##################################################### -## Module file for fre-nctools +## Module file for get_extrn_ics task. ############################################################# - #module load ics/16.0.3 - #module load NetCDF + +module purge + +module load hpss diff --git a/modulefiles/regional_workflow/fre-nctools.wcoss b/modulefiles/tasks/hera/get_extrn_lbcs similarity index 59% rename from modulefiles/regional_workflow/fre-nctools.wcoss rename to modulefiles/tasks/hera/get_extrn_lbcs index ce886dcab3..1919f3355a 100644 --- a/modulefiles/regional_workflow/fre-nctools.wcoss +++ b/modulefiles/tasks/hera/get_extrn_lbcs @@ -1,6 +1,8 @@ #%Module##################################################### -## Module file for fre-nctools +## Module file for get_extrn_lbcs task. 
############################################################# -module load ics/16.0.3 -module load NetCDF/4.2/serial +module purge + +module load hpss + diff --git a/modulefiles/regional_workflow/mosaic_file.theia b/modulefiles/tasks/hera/make_grid similarity index 54% rename from modulefiles/regional_workflow/mosaic_file.theia rename to modulefiles/tasks/hera/make_grid index 6297ee3e47..2d651a5c94 100644 --- a/modulefiles/regional_workflow/mosaic_file.theia +++ b/modulefiles/tasks/hera/make_grid @@ -1,8 +1,10 @@ #%Module##################################################### -## Module file for regional_grid +## Module file for make_grid task. ############################################################# + module purge -module load intel/18.1.163 -module load netcdf/4.6.1 -module load hdf5/1.10.4 + +module load intel/18.0.5.274 +module load netcdf/4.7.0 +module load hdf5/1.10.5 diff --git a/modulefiles/tasks/hera/make_ics.hardcoded b/modulefiles/tasks/hera/make_ics.hardcoded new file mode 100644 index 0000000000..850bdfe5a4 --- /dev/null +++ b/modulefiles/tasks/hera/make_ics.hardcoded @@ -0,0 +1,22 @@ +#%Module##################################################### +## Module file for make_ics task. 
+############################################################# + +module purge + +module load intel/18.0.5.274 +module load impi/2018.0.4 +module load netcdf/4.7.0 + +module use -a /scratch1/NCEPDEV/global/gwv/l819/lib/modulefiles +module load esmflocal/8_0_48b.netcdf47 + +module use -a /scratch2/NCEPDEV/nwprod/NCEPLIBS/modulefiles +module load w3nco/2.0.6 +module load nemsio/2.2.3 +module load bacio/2.0.2 +module load sp/2.0.2 +module load sfcio/1.1.0 +module load sigio/2.1.0 + +module load wgrib2/2.0.8 diff --git a/modulefiles/tasks/hera/make_ics.local b/modulefiles/tasks/hera/make_ics.local new file mode 100644 index 0000000000..d1644e89ea --- /dev/null +++ b/modulefiles/tasks/hera/make_ics.local @@ -0,0 +1 @@ +module load wgrib2/2.0.8 diff --git a/modulefiles/tasks/hera/make_lbcs.hardcoded b/modulefiles/tasks/hera/make_lbcs.hardcoded new file mode 100644 index 0000000000..842e655ffb --- /dev/null +++ b/modulefiles/tasks/hera/make_lbcs.hardcoded @@ -0,0 +1,22 @@ +#%Module##################################################### +## Module file for make_lbcs task. 
+############################################################# + +module purge + +module load intel/18.0.5.274 +module load impi/2018.0.4 +module load netcdf/4.7.0 + +module use -a /scratch1/NCEPDEV/global/gwv/l819/lib/modulefiles +module load esmflocal/8_0_48b.netcdf47 + +module use -a /scratch2/NCEPDEV/nwprod/NCEPLIBS/modulefiles +module load w3nco/2.0.6 +module load nemsio/2.2.3 +module load bacio/2.0.2 +module load sp/2.0.2 +module load sfcio/1.1.0 +module load sigio/2.1.0 + +module load wgrib2/2.0.8 diff --git a/modulefiles/tasks/hera/make_lbcs.local b/modulefiles/tasks/hera/make_lbcs.local new file mode 100644 index 0000000000..d1644e89ea --- /dev/null +++ b/modulefiles/tasks/hera/make_lbcs.local @@ -0,0 +1 @@ +module load wgrib2/2.0.8 diff --git a/modulefiles/regional_workflow/fre-nctools.hera b/modulefiles/tasks/hera/make_orog.hardcoded similarity index 68% rename from modulefiles/regional_workflow/fre-nctools.hera rename to modulefiles/tasks/hera/make_orog.hardcoded index a450b1a31a..295940dae9 100644 --- a/modulefiles/regional_workflow/fre-nctools.hera +++ b/modulefiles/tasks/hera/make_orog.hardcoded @@ -1,7 +1,10 @@ #%Module##################################################### -## Module file for fre-nctools +## Module file for make_orog task. ############################################################# + +module purge + module load intel/18.0.5.274 -module load impi/2018.0.4 -module load netcdf/4.6.1 +module load netcdf/4.7.0 module load hdf5/1.10.4 + diff --git a/modulefiles/tasks/hera/make_sfc_climo.hardcoded b/modulefiles/tasks/hera/make_sfc_climo.hardcoded new file mode 100644 index 0000000000..3e06d3e730 --- /dev/null +++ b/modulefiles/tasks/hera/make_sfc_climo.hardcoded @@ -0,0 +1,12 @@ +#%Module##################################################### +## Module file for make_sfc_climo task. 
+############################################################# + +module purge + +module load intel/18.0.5.274 +module load impi/2018.0.4 + +module use -a /scratch1/NCEPDEV/global/gwv/l819/lib/modulefiles +module load esmflocal/8_0_48b.netcdf47 + diff --git a/modulefiles/tasks/hera/run_fcst_no_ccpp.hardcoded b/modulefiles/tasks/hera/run_fcst_no_ccpp.hardcoded new file mode 100644 index 0000000000..290aff9daf --- /dev/null +++ b/modulefiles/tasks/hera/run_fcst_no_ccpp.hardcoded @@ -0,0 +1,12 @@ +#%Module##################################################### +## Module file for run_fcst task without CCPP. +############################################################# + +module purge + +module use /scratch1/NCEPDEV/nems/emc.nemspara/soft/modulefiles +module load intel/18.0.5.274 +module load impi/2018.0.4 +module load netcdf/4.6.1 +module load pnetcdf/1.10.0 + diff --git a/modulefiles/tasks/hera/run_post b/modulefiles/tasks/hera/run_post new file mode 100644 index 0000000000..70df4e2e82 --- /dev/null +++ b/modulefiles/tasks/hera/run_post @@ -0,0 +1,30 @@ +#%Module##################################################### +## Module file for run_post task. +############################################################# + +module purge + +module load intel/19.0.4.243 +module load impi/2019.0.4 + +# Load nceplibs modules. 
+module use -a /scratch2/NCEPDEV/nwprod/NCEPLIBS/modulefiles + +module load sigio/2.1.1 +module load jasper/1.900.1 +module load png/1.2.44 +module load z/1.2.11 +module load sfcio/1.1.1 +module load nemsio/2.2.4 +module load bacio/2.0.3 +module load g2/3.1.1 +module load gfsio/1.1.0 +module load ip/3.0.2 +module load sp/2.0.3 +module load w3emc/2.3.1 +module load w3nco/2.0.7 +module load crtm/2.2.5 +module load netcdf/4.7.0 +module load g2tmpl/1.5.1 +module load wrfio/1.1.1 + diff --git a/regional/build_regional b/regional/build_regional deleted file mode 100755 index 82f6b2e386..0000000000 --- a/regional/build_regional +++ /dev/null @@ -1,117 +0,0 @@ -#!/bin/bash -set -eux - -if [ $# -eq 0 ] ; then - echo "ERROR: You must provide the platform as a command-line argument" - exit 1 -fi - -SITE=${1} - -RGNL_WFLOW_DIR=$( pwd )/.. -if [ ${SITE} == "cheyenne" ]; then - export NCEPLIB_DIR=/glade/p/ral/jntp/GMTB/tools/NCEPlibs/20180717/intel-18.0.1/ -fi - -cd ${RGNL_WFLOW_DIR}/sorc -# -# The following build several new utilities needed in order to use the -# new Jim Purser-type grid in the SAR. The following only works on -# theia for now. It needs to be ported to other platforms. -# -./build_regional_grid.sh ${SITE} >& out.build_regional_grid -./build_global_equiv_resol.sh ${SITE} >& out.build_global_equiv_resol -./build_mosaic_file.sh ${SITE} >& out.build_mosaic_file -# -# Build sfc_climo_gen. -# -cd ${RGNL_WFLOW_DIR}/sorc/UFS_UTILS_develop/sorc -./build_sfc_climo_gen.sh >& out.build_sfc_climo_gen -cp ../exec/sfc_climo_gen ${RGNL_WFLOW_DIR}/exec -# -# Build fre-nctools. -# -./build_fre-nctools.sh >& out.build_fre-nctools -cp ../exec/filter_topo ${RGNL_WFLOW_DIR}/exec -cp ../exec/fregrid ${RGNL_WFLOW_DIR}/exec -cp ../exec/fregrid_parallel ${RGNL_WFLOW_DIR}/exec -cp ../exec/make_hgrid ${RGNL_WFLOW_DIR}/exec -cp ../exec/make_hgrid_parallel ${RGNL_WFLOW_DIR}/exec -cp ../exec/make_solo_mosaic ${RGNL_WFLOW_DIR}/exec -cp ../exec/shave.x ${RGNL_WFLOW_DIR}/exec -# -# Build orog. 
-# -./build_orog.sh >& out.build_orog -cp ../exec/orog.x ${RGNL_WFLOW_DIR}/exec -# -# Build chgres_cube. -# -# The first case is using Larissa's make.sh script, the second case uses -# the more formal way also used by the other codes in UFS_UTILS. -if [ 0 = 1 ]; then - cd ${RGNL_WFLOW_DIR}/sorc/UFS_UTILS_chgres_grib2/sorc/chgres_cube.fd - ./make.sh >& out.build_chgres_cube -# Is the following needed? The version of the global_chgres.exe execu- -# table used by the tasks that use chgres_cube (the ICs/surf/LBC0 and -# LBC1_to_LBCN tasks) is located in the exec directory under the UFS_- -# UTILS_chgres_grib2 directory, not in ${RGNL_WFLOW_DIR}/exec. - cp ../exec/global_chgres.exe ${RGNL_WFLOW_DIR}/exec -else - cd ${RGNL_WFLOW_DIR}/sorc/UFS_UTILS_chgres_grib2/sorc - ./build_chgres_cube.sh >& out.build_chgres_cube -# Is the following needed? The version of the global_chgres.exe execu- -# table used by the tasks that use chgres_cube (the ICs/surf/LBC0 and -# LBC1_to_LBCN tasks) is located in the exec directory under the UFS_- -# UTILS_chgres_grib2 directory, not in ${RGNL_WFLOW_DIR}/exec. 
- cp ../exec/chgres_cube.exe ${RGNL_WFLOW_DIR}/exec -fi - -# -# Built EMC_post -# -cd ${RGNL_WFLOW_DIR}/sorc/EMC_post/sorc -./build_ncep_post.sh >& out.build_EMC_post -cp ../exec/ncep_post ${RGNL_WFLOW_DIR}/exec - -# prepare fixed data directories - -cd ${RGNL_WFLOW_DIR} -mkdir -p fix/fix_fv3 -cd fix - -if [ ${SITE} == "theia" ]; then - - ln -sfn /scratch4/NCEPDEV/global/save/glopara/git/fv3gfs/fix/fix_am fix_am - -elif [ ${SITE} == "hera" ]; then - - ln -sfn /scratch1/NCEPDEV/global/glopara/fix/fix_am fix_am - -elif [ ${SITE} == "wcoss" ] || [ ${SITE} == "dell" ]; then - - ln -sfn /gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix/fix_am fix_am - -elif [ ${SITE} == "wcoss_cray" ]; then - - ln -sfn /gpfs/hps3/emc/global/noscrub/emc.glopara/git/fv3gfs/fix/fix_am fix_am - echo "module swap pmi pmi/5.0.11" >> ${FV3GFS_DIR}/../NEMSfv3gfs/modulefiles/wcoss_cray/fv3 - -elif [ ${SITE} == "odin" ]; then - - ln -sfn /scratch/ywang/fix/theia_fix/fix_am fix_am - -elif [ ${SITE} == "cheyenne" ]; then - - ln -sfn /glade/p/ral/jntp/GMTB/FV3GFS_V1_RELEASE/fix/fix_am/ fix_am - -elif [ ${SITE} == "jet" ]; then - - ln -sfn regional/build_regional/fix/fix_am fix_am - -else - - echo "Unknown site " ${SITE} - exit - -fi diff --git a/scripts/exregional_get_extrn_files.sh b/scripts/exregional_get_extrn_files.sh index c21a898a09..d684d18631 100755 --- a/scripts/exregional_get_extrn_files.sh +++ b/scripts/exregional_get_extrn_files.sh @@ -3,13 +3,12 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. 
$USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -18,19 +17,31 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Print message indicating entry into script. # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + This is the ex-script for the task that copies/fetches to a local direc- tory (either from disk or HPSS) the external model files from which ini- tial or boundary condition files for the FV3 will be generated. @@ -45,24 +56,26 @@ tial or boundary condition files for the FV3 will be generated. 
# #----------------------------------------------------------------------- # -valid_args=( "EXTRN_MDL_FNS" "EXTRN_MDL_SYSDIR" "EXTRN_MDL_FILES_DIR" \ - "EXTRN_MDL_ARCV_FNS" "EXTRN_MDL_ARCV_FPS" "EXTRN_MDL_ARCV_FMT" \ - "EXTRN_MDL_ARCVREL_DIR" ) +valid_args=( \ +"EXTRN_MDL_FNS" \ +"EXTRN_MDL_SYSDIR" \ +"EXTRN_MDL_FILES_DIR" \ +"EXTRN_MDL_ARCV_FNS" \ +"EXTRN_MDL_ARCV_FPS" \ +"EXTRN_MDL_ARCV_FMT" \ +"EXTRN_MDL_ARCVREL_DIR" \ +) process_args valid_args "$@" - -# If VERBOSE is set to TRUE, print out what each valid argument has been -# set to. -if [ "$VERBOSE" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ -The arguments to script/function \"${script_name}\" have been set as -follows: -" - for (( i=0; i<$num_valid_args; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" - done -fi +# +#----------------------------------------------------------------------- +# +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. +# +#----------------------------------------------------------------------- +# +print_input_args valid_args # #----------------------------------------------------------------------- # @@ -90,26 +103,30 @@ for FP in "${EXTRN_MDL_FPS[@]}"; do if [ -f "$FP" ]; then if [ $( find "$FP" -mmin +${min_age} ) ]; then + num_files_found_on_disk=$(( num_files_found_on_disk+1 )) - print_info_msg "\n\ + print_info_msg " File FP exists on system disk and is older than the minimum required age of min_age minutes: FP = \"$FP\" - min_age = ${min_age} min" + min_age = ${min_age} minutes" + else - print_info_msg "\n\ + + print_info_msg " File FP exists on system disk and but is NOT older than the minumum re- quired age of min_age minutes: FP = \"$FP\" - min_age = ${min_age} min + min_age = ${min_age} minutes Will try fetching all external model files from HPSS. 
Not checking pre- sence and age of remaining external model files on system disk." break + fi else - print_info_msg "\n\ + print_info_msg " File FP does NOT exist on system disk: FP = \"$FP\" Will try fetching all external model files from HPSS. Not checking pre- @@ -142,30 +159,30 @@ fi # EXTRN_MDL_FNS_str="( "$( printf "\"%s\" " "${EXTRN_MDL_FNS[@]}" )")" -if [ "$DATA_SRC" = "disk" ]; then +if [ "${DATA_SRC}" = "disk" ]; then if [ "${RUN_ENVIR}" = "nco" ]; then - print_info_msg "\ + print_info_msg " Creating links in local directory (EXTRN_MDL_FILES_DIR) to external mo- del files (EXTRN_MDL_FNS) in the system directory on disk (EXTRN_MDL_- SYSDIR): - EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\" - EXTRN_MDL_SYSDIR = \"$EXTRN_MDL_SYSDIR\" - EXTRN_MDL_FNS = $EXTRN_MDL_FNS_str -" + EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" + EXTRN_MDL_SYSDIR = \"${EXTRN_MDL_SYSDIR}\" + EXTRN_MDL_FNS = ${EXTRN_MDL_FNS_str}" + ln_vrfy -sf -t ${EXTRN_MDL_FILES_DIR} ${EXTRN_MDL_FPS[@]} else - print_info_msg "\ + print_info_msg " Copying external model files (EXTRN_MDL_FNS) from the system directory on disk (EXTRN_MDL_SYSDIR) to local directory (EXTRN_MDL_FILES_DIR): - EXTRN_MDL_SYSDIR = \"$EXTRN_MDL_SYSDIR\" - EXTRN_MDL_FNS = $EXTRN_MDL_FNS_str - EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\" -" - cp_vrfy ${EXTRN_MDL_FPS[@]} $EXTRN_MDL_FILES_DIR + EXTRN_MDL_SYSDIR = \"${EXTRN_MDL_SYSDIR}\" + EXTRN_MDL_FNS = ${EXTRN_MDL_FNS_str} + EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\"" + + cp_vrfy ${EXTRN_MDL_FPS[@]} ${EXTRN_MDL_FILES_DIR} fi # @@ -175,24 +192,28 @@ on disk (EXTRN_MDL_SYSDIR) to local directory (EXTRN_MDL_FILES_DIR): # #----------------------------------------------------------------------- # - if [ "$ICS_OR_LBCS" = "ICS" ]; then + if [ "${ICS_OR_LBCS}" = "ICS" ]; then - print_info_msg "\n\ + print_info_msg " ======================================================================== Successfully copied or linked to external model files on system disk needed for 
generating initial conditions and surface fields for the FV3 forecast!!! -Exiting script: \"${script_name}\" + +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" - elif [ "$ICS_OR_LBCS" = "LBCS" ]; then + elif [ "${ICS_OR_LBCS}" = "LBCS" ]; then - print_info_msg "\n\ + print_info_msg " ======================================================================== Successfully copied or linked to external model files on system disk needed for generating lateral boundary conditions for the FV3 fore- cast!!! -Exiting script: \"${script_name}\" + +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" fi @@ -204,7 +225,7 @@ Exiting script: \"${script_name}\" # #----------------------------------------------------------------------- # -elif [ "$DATA_SRC" = "HPSS" ]; then +elif [ "${DATA_SRC}" = "HPSS" ]; then # #----------------------------------------------------------------------- # @@ -213,30 +234,20 @@ elif [ "$DATA_SRC" = "HPSS" ]; then # #----------------------------------------------------------------------- # - prefix=${EXTRN_MDL_ARCVREL_DIR:+$EXTRN_MDL_ARCVREL_DIR/} + prefix=${EXTRN_MDL_ARCVREL_DIR:+${EXTRN_MDL_ARCVREL_DIR}/} EXTRN_MDL_FPS=( "${EXTRN_MDL_FNS[@]/#/$prefix}" ) EXTRN_MDL_FPS_str="( "$( printf "\"%s\" " "${EXTRN_MDL_FPS[@]}" )")" EXTRN_MDL_ARCV_FPS_str="( "$( printf "\"%s\" " "${EXTRN_MDL_ARCV_FPS[@]}" )")" - print_info_msg "\n\ + print_info_msg " Fetching model output files from HPSS. 
The model output files (EXTRN_- MDL_FPS), the archive files on HPSS in which these output files are stored (EXTRN_MDL_ARCV_FPS), and the local directory into which they will be copied (EXTRN_MDL_FILES_DIR) are: - EXTRN_MDL_FPS = $EXTRN_MDL_FPS_str - EXTRN_MDL_ARCV_FPS = $EXTRN_MDL_ARCV_FPS_str - EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\"" -# -#----------------------------------------------------------------------- -# -# Load necessary modules. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set +x; } > /dev/null 2>&1 - module load hpss - { restore_shell_opts; } > /dev/null 2>&1 + EXTRN_MDL_FPS = ${EXTRN_MDL_FPS_str} + EXTRN_MDL_ARCV_FPS = ${EXTRN_MDL_ARCV_FPS_str} + EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\"" # #----------------------------------------------------------------------- # @@ -253,7 +264,7 @@ will be copied (EXTRN_MDL_FILES_DIR) are: # #----------------------------------------------------------------------- # - if [ "$EXTRN_MDL_ARCV_FMT" = "tar" ]; then + if [ "${EXTRN_MDL_ARCV_FMT}" = "tar" ]; then # #----------------------------------------------------------------------- # @@ -264,7 +275,7 @@ will be copied (EXTRN_MDL_FILES_DIR) are: # num_files_to_extract="${#EXTRN_MDL_FPS[@]}" - for (( narcv=0; narcv<$num_arcv_files; narcv++ )); do + for (( narcv=0; narcv<${num_arcv_files}; narcv++ )); do narcv_formatted=$( printf "%02d" $narcv ) ARCV_FP="${EXTRN_MDL_ARCV_FPS[$narcv]}" @@ -287,16 +298,15 @@ will be copied (EXTRN_MDL_FILES_DIR) are: # HTAR_LOG_FN="log.htar_tvf.${narcv_formatted}" htar -tvf ${ARCV_FP} ${EXTRN_MDL_FPS[@]} >& ${HTAR_LOG_FN} || \ - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ htar file list operation (\"htar -tvf ...\") failed. 
Check the log file HTAR_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: - EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\" - HTAR_LOG_FN = \"$HTAR_LOG_FN\" -" + EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" + HTAR_LOG_FN = \"${HTAR_LOG_FN}\"" i=0 files_in_crnt_arcv=() - for (( nfile=0; nfile<$num_files_to_extract; nfile++ )); do + for (( nfile=0; nfile<${num_files_to_extract}; nfile++ )); do extrn_mdl_fp="${EXTRN_MDL_FPS[$nfile]}" # grep -n ${extrn_mdl_fp} ${HTAR_LOG_FN} 2>&1 && { \ grep -n ${extrn_mdl_fp} ${HTAR_LOG_FN} > /dev/null 2>&1 && { \ @@ -311,14 +321,13 @@ HTAR_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: num_files_in_crnt_arcv=${#files_in_crnt_arcv[@]} if [ ${num_files_in_crnt_arcv} -eq 0 ]; then EXTRN_MDL_FPS_str="( "$( printf "\"%s\" " "${EXTRN_MDL_FPS[@]}" )")" - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The current archive file (ARCV_FP) does not contain any of the external model files listed in EXTRN_MDL_FPS: - ARCV_FP = \"$ARCV_FP\" - EXTRN_MDL_FPS = $EXTRN_MDL_FPS_str + ARCV_FP = \"${ARCV_FP}\" + EXTRN_MDL_FPS = ${EXTRN_MDL_FPS_str} The archive file should contain at least one external model file; other- -wise, it would not be needed. -" +wise, it would not be needed." fi # # Extract from the current tar archive file on HPSS all the external mo- @@ -328,12 +337,11 @@ wise, it would not be needed. # HTAR_LOG_FN="log.htar_xvf.${narcv_formatted}" htar -xvf ${ARCV_FP} ${files_in_crnt_arcv[@]} >& ${HTAR_LOG_FN} || \ - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ htar file extract operation (\"htar -xvf ...\") failed. 
Check the log file HTAR_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: - EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\" - HTAR_LOG_FN = \"$HTAR_LOG_FN\" -" + EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" + HTAR_LOG_FN = \"${HTAR_LOG_FN}\"" # # Note that the htar file extract operation above may return with a 0 # exit code (success) even if one or more (or all) external model output @@ -356,16 +364,15 @@ file HTAR_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: fi grep -n "${FP}" "${HTAR_LOG_FN}" > /dev/null 2>&1 || \ - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ External model output file FP not extracted from tar archive file ARCV_- FP: - ARCV_FP = \"$ARCV_FP\" + ARCV_FP = \"${ARCV_FP}\" FP = \"$FP\" Check the log file HTAR_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: - EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\" - HTAR_LOG_FN = \"$HTAR_LOG_FN\" -" + EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" + HTAR_LOG_FN = \"${HTAR_LOG_FN}\"" done @@ -382,40 +389,46 @@ details: # #----------------------------------------------------------------------- # - if [ "$EXTRN_MDL_ARCVREL_DIR" != "." ]; then + if [ "${EXTRN_MDL_ARCVREL_DIR}" != "." ]; then # -# The code below works if the first character of EXTRN_MDL_ARCVREL_DIR -# is a "/", which is the only case encountered thus far. The code may -# have to be modified to accomodate the case of the first character of -# EXTRN_MDL_ARCVREL_DIR not being a "/". +# The code below works if EXTRN_MDL_ARCVREL_DIR starts with a "/" or a +# "./", which are the only case encountered thus far. The code may have +# to be modified to accomodate other cases. # - if [ "${EXTRN_MDL_ARCVREL_DIR:0:1}" = "/" -o \ - "${EXTRN_MDL_ARCVREL_DIR:0:2}" = "./" ]; then - - mv_vrfy $EXTRN_MDL_ARCVREL_DIR/* . 
+ if [ "${EXTRN_MDL_ARCVREL_DIR:0:1}" = "/" ] || \ + [ "${EXTRN_MDL_ARCVREL_DIR:0:2}" = "./" ]; then +# +# Strip the "/" or "./" from the beginning of EXTRN_MDL_ARCVREL_DIR to +# obtain the relative directory from which to move the extracted files +# to the current directory. Then move the files. +# + rel_dir=$( printf "%s" "${EXTRN_MDL_ARCVREL_DIR}" | \ + sed -r 's%^(\/|\.\/)([^/]*)(.*)%\2\3%' ) + mv_vrfy ${rel_dir}/* . # -# Get the first subdirectory in EXTRN_MDL_ARCVREL_DIR, i.e. the directo- -# ry after the first forward slash. This is the subdirectory that we -# want to remove. +# Get the first subdirectory in rel_dir, i.e. the subdirectory before +# the first forward slash. This is the subdirectory that we want to re- +# move since it no longer contains any files (only subdirectories). +# Then remove it. # - subdir_to_remove=$( printf "%s" "${EXTRN_MDL_ARCVREL_DIR}" | \ - sed -r 's%^(\/|\.\/)([^/]*).*%\2%' ) - rm_vrfy -rf ./$subdir_to_remove + subdir_to_remove=$( printf "%s" "${rel_dir}" | \ + sed -r 's%^([^/]*)(.*)%\1%' ) + rm_vrfy -rf ./${subdir_to_remove} # # If EXTRN_MDL_ARCVREL_DIR does not start with a "/" (and it is not # equal to "."), then print out an error message and exit. # else - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The archive-relative directory specified by EXTRN_MDL_ARCVREL_DIR [i.e. the directory \"within\" the tar file(s) listed in EXTRN_MDL_ARCV_FPS] is not the current directory (i.e. it is not \".\"), and it does not start -with a \"/\": - EXTRN_MDL_ARCVREL_DIR = \"$EXTRN_MDL_ARCVREL_DIR\" - EXTRN_MDL_ARCV_FPS = $EXTRN_MDL_ARCV_FPS_str -This script (\"${script_name}\) must be modified to account for this case. -" +with a \"/\" or a \"./\": + EXTRN_MDL_ARCVREL_DIR = \"${EXTRN_MDL_ARCVREL_DIR}\" + EXTRN_MDL_ARCV_FPS = ${EXTRN_MDL_ARCV_FPS_str} +This script must be modified to account for this case." + fi fi @@ -427,7 +440,7 @@ This script (\"${script_name}\) must be modified to account for this case. 
# #----------------------------------------------------------------------- # - elif [ "$EXTRN_MDL_ARCV_FMT" = "zip" ]; then + elif [ "${EXTRN_MDL_ARCV_FMT}" = "zip" ]; then # #----------------------------------------------------------------------- # @@ -440,13 +453,13 @@ This script (\"${script_name}\) must be modified to account for this case. # #----------------------------------------------------------------------- # - if [ "$num_arcv_files" -gt 1 ]; then - print_err_msg_exit "${script_name}" "\ + if [ "${num_arcv_files}" -gt 1 ]; then + print_err_msg_exit "\ Currently, this script is coded to handle only one archive file if the archive file format is specified to be \"zip\", but the number of archive files (num_arcv_files) passed to this script is greater than 1: - EXTRN_MDL_ARCV_FMT = \"$EXTRN_MDL_ARCV_FMT\" - num_arcv_files = $num_arcv_files + EXTRN_MDL_ARCV_FMT = \"${EXTRN_MDL_ARCV_FMT}\" + num_arcv_files = ${num_arcv_files} Please modify the script to handle more than one \"zip\" archive file. Note that code already exists in this script that can handle multiple archive files if the archive file format is specified to be \"tar\", so @@ -464,12 +477,11 @@ that can be used as a guide for the \"zip\" case." # HSI_LOG_FN="log.hsi_get" hsi get "${ARCV_FP}" >& ${HSI_LOG_FN} || \ - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ hsi file get operation (\"hsi get ...\") failed. 
Check the log file HSI_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: - EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\" - HSI_LOG_FN = \"$HSI_LOG_FN\" -" + EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" + HSI_LOG_FN = \"${HSI_LOG_FN}\"" # #----------------------------------------------------------------------- # @@ -480,14 +492,13 @@ HSI_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: # UNZIP_LOG_FN="log.unzip_lv" unzip -l -v ${ARCV_FN} >& ${UNZIP_LOG_FN} || \ - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ unzip operation to list the contents of the zip archive file ARCV_FN in the directory EXTRN_MDL_FILES_DIR failed. Check the log file UNZIP_- LOG_FN in that directory for details: - ARCV_FN = \"$ARCV_FN\" - EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\" - UNZIP_LOG_FN = \"$UNZIP_LOG_FN\" -" + ARCV_FN = \"${ARCV_FN}\" + EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" + UNZIP_LOG_FN = \"${UNZIP_LOG_FN}\"" # #----------------------------------------------------------------------- # @@ -502,15 +513,14 @@ LOG_FN in that directory for details: # for FP in "${EXTRN_MDL_FPS[@]}"; do grep -n "${FP}" "${UNZIP_LOG_FN}" > /dev/null 2>&1 || \ - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ External model output file FP does not exist in the zip archive file ARCV_FN in the directory EXTRN_MDL_FILES_DIR. 
Check the log file UN- ZIP_LOG_FN in that directory for the contents of the zip archive: - EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\" - ARCV_FN = \"$ARCV_FN\" + EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" + ARCV_FN = \"${ARCV_FN}\" FP = \"$FP\" - UNZIP_LOG_FN = \"$UNZIP_LOG_FN\" -" + UNZIP_LOG_FN = \"${UNZIP_LOG_FN}\"" done # #----------------------------------------------------------------------- @@ -524,12 +534,11 @@ ZIP_LOG_FN in that directory for the contents of the zip archive: # UNZIP_LOG_FN="log.unzip" unzip -o "${ARCV_FN}" ${EXTRN_MDL_FPS[@]} >& ${UNZIP_LOG_FN} || \ - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ unzip file extract operation (\"unzip -o ...\") failed. Check the log file UNZIP_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: - EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\" - UNZIP_LOG_FN = \"$UNZIP_LOG_FN\" -" + EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" + UNZIP_LOG_FN = \"${UNZIP_LOG_FN}\"" # # NOTE: # If EXTRN_MDL_ARCVREL_DIR is not empty, the unzip command above will @@ -550,23 +559,27 @@ file UNZIP_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: # #----------------------------------------------------------------------- # - if [ "$ICS_OR_LBCS" = "ICS" ]; then + if [ "${ICS_OR_LBCS}" = "ICS" ]; then - print_info_msg "\n\ + print_info_msg " ======================================================================== External model files needed for generating initial condition and surface fields for the FV3SAR successfully fetched from HPSS!!! 
-Exiting script: \"${script_name}\" + +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" - elif [ "$ICS_OR_LBCS" = "LBCS" ]; then + elif [ "${ICS_OR_LBCS}" = "LBCS" ]; then - print_info_msg "\n\ + print_info_msg " ======================================================================== External model files needed for generating lateral boundary conditions on the halo of the FV3SAR's regional grid successfully fetched from HPSS!!! -Exiting script: \"${script_name}\" + +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" fi @@ -581,3 +594,4 @@ fi #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 + diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 0bf8affcdb..1e962576ce 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -3,22 +3,21 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # -# Source file containing definitions of mathematical and physical con- -# stants. +# Source other necessary files. # #----------------------------------------------------------------------- # -. ${USHDIR}/constants.sh +. $USHDIR/constants.sh +. 
$USHDIR/link_fix.sh # #----------------------------------------------------------------------- # @@ -31,15 +30,27 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + This is the ex-script for the task that generates grid files. ========================================================================" # @@ -54,30 +65,22 @@ This is the ex-script for the task that generates grid files. # valid_args=( "WORKDIR_LOCAL" ) process_args valid_args "$@" - -# If VERBOSE is set to TRUE, print out what each valid argument has been -# set to. 
-if [ "$VERBOSE" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ -The arguments to script/function \"${script_name}\" have been set as -follows: -" - for (( i=0; i<$num_valid_args; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" - done -fi - - # #----------------------------------------------------------------------- # -# Set the file names of the scripts to use for generating the grid -# files, the orography files, and for filtering the orography files, -# respectively. Also, set the name of the executable file used to -# "shave" (i.e. remove the halo from) certain grid and orography -# files. The shaving is needed only for the gtype="regional" case. +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. +# +#----------------------------------------------------------------------- +# +print_input_args valid_args +# +#----------------------------------------------------------------------- +# +# Set the file name of the script to use for generating the grid files +# and the name of the executable file used to "shave" (i.e. remove the +# halo from) certain grid files. # #----------------------------------------------------------------------- # @@ -128,48 +131,29 @@ case $MACHINE in ;; -"THEIA") -# - { save_shell_opts; set +x; } > /dev/null 2>&1 - - . /apps/lmod/lmod/init/sh - module purge - module load intel/16.1.150 - module load impi - module load hdf5/1.8.14 - module load netcdf/4.3.0 - module list - - { restore_shell_opts; } > /dev/null 2>&1 - - export APRUN="time" - export topo_dir="/scratch4/NCEPDEV/global/save/glopara/svn/fv3gfs/fix/fix_orog" - - ulimit -s unlimited - ulimit -a - ;; - - "HERA") # - { save_shell_opts; set +x; } > /dev/null 2>&1 - - . 
/apps/lmod/lmod/init/sh - module purge - module load intel/18.0.5.274 - module load netcdf/4.7.0 - module load hdf5/1.10.5 - module list - - { restore_shell_opts; } > /dev/null 2>&1 - +# { save_shell_opts; set +x; } > /dev/null 2>&1 +# +# . /apps/lmod/lmod/init/sh +# module purge +# module load intel/18.0.5.274 +## module load netcdf/4.6.1 +## module load hdf5/1.10.4 +# module load netcdf/4.7.0 +# module load hdf5/1.10.5 +# module list +# +# { restore_shell_opts; } > /dev/null 2>&1 +# +# export APRUN="time" export APRUN="time" - export topo_dir="/scratch1/NCEPDEV/global/glopara/fix/fix_orog" - - ulimit -s unlimited - ulimit -a + topo_dir="/scratch1/NCEPDEV/global/glopara/fix/fix_orog" +# +# ulimit -s unlimited +# ulimit -a ;; - +# "JET") # @@ -242,8 +226,8 @@ mkdir_vrfy -p "$tmpdir" # tains information only about tile 7 (i.e. it does not have any infor- # mation on how tiles 1 through 6 are connected or that tile 7 is within # tile 6). All these files will be placed in the directory specified by -# GRID_DIR. Note that the file for tile 7 will include a halo of -# width nhw_T7 cells. +# GRID_DIR. Note that the file for tile 7 will include a halo of width +# NHW cells. # # Since tiles 1 through 6 are not needed to run the FV3SAR model and are # not used later on in any other preprocessing steps, it is not clear @@ -252,7 +236,7 @@ mkdir_vrfy -p "$tmpdir" # lity/executable that grid_gen_scr calls, i.e. it might be because with # make_hgrid, one has to either create just the 6 global tiles or create # the 6 global tiles plus the regional (tile 7), and then for the case -# of a regional simulation (i.e. gtype="regional", which is always the +# of a regional simulation (i.e. GTYPE="regional", which is always the # case here) just not use the 6 global tiles. 
# # The grid_gen_scr script called below takes its next-to-last argument @@ -263,12 +247,17 @@ mkdir_vrfy -p "$tmpdir" # size specified by the argument to the --halo flag does not extend be- # yond the boundaries of the parent grid (tile 6). In this case, since # the values passed to the --istart_nest, ..., and --jend_nest flags al- -# ready include a halo (because these arguments are $istart_rgnl_with_- -# halo_T6SG, $iend_rgnl_wide_halo_T6SG, $jstart_rgnl_wide_halo_T6SG, and -# $jend_rgnl_wide_halo_T6SG), it is reasonable to pass as the argument -# to --halo a zero. However, make_hgrid requires that the argument to -# --halo be at least 1, so below, we pass a 1 as the next-to-last argu- -# ment to grid_gen_scr. +# ready include a halo (because these arguments are +# +# ${ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}, +# ${IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}, +# ${JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}, and +# ${JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}, +# +# i.e. they include "WITH_WIDE_HALO_" in their names), it is reasonable +# to pass as the argument to --halo a zero. However, make_hgrid re- +# quires that the argument to --halo be at least 1, so below, we pass a +# 1 as the next-to-last argument to grid_gen_scr. 
# # More information on make_hgrid: # ------------------------------ @@ -280,14 +269,15 @@ mkdir_vrfy -p "$tmpdir" # --grid_type gnomonic_ed \ # --nlon 2*${RES} \ # --grid_name C${RES}_grid \ -# --do_schmidt --stretch_factor ${stretch_fac} \ -# --target_lon ${lon_ctr_T6} --target_lat ${lat_ctr_T6} \ -# --nest_grid --parent_tile 6 --refine_ratio ${refine_ratio} \ -# --istart_nest ${istart_rgnl_wide_halo_T6SG} \ -# --jstart_nest ${jstart_rgnl_wide_halo_T6SG} \ -# --iend_nest ${iend_rgnl_wide_halo_T6SG} \ -# --jend_nest ${jend_rgnl_wide_halo_T6SG} \ -# --halo ${nh3_T7} \ +# --do_schmidt --stretch_factor ${STRETCH_FAC} \ +# --target_lon ${LON_CTR} +# --target_lat ${LAT_CTR} \ +# --nest_grid --parent_tile 6 --refine_ratio ${GFDLgrid_REFINE_RATIO} \ +# --istart_nest ${ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} \ +# --jstart_nest ${JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} \ +# --iend_nest ${IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} \ +# --jend_nest ${JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} \ +# --halo ${NH3} \ # --great_circle_algorithm # # This creates the 7 grid files ${CRES}_grid.tileN.nc for N=1,...,7. @@ -295,14 +285,14 @@ mkdir_vrfy -p "$tmpdir" # the extents of the arrays in that file do not seem to include a halo, # i.e. they are based only on the values passed via the four flags # -# --istart_nest ${istart_rgnl_wide_halo_T6SG} -# --jstart_nest ${jstart_rgnl_wide_halo_T6SG} -# --iend_nest ${iend_rgnl_wide_halo_T6SG} -# --jend_nest ${jend_rgnl_wide_halo_T6SG} +# --istart_nest ${ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} +# --jstart_nest ${JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} +# --iend_nest ${IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} +# --jend_nest ${JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} # # According to Rusty Benson of GFDL, the flag # -# --halo ${nh3_T7} +# --halo ${NH3} # # only checks to make sure that the nested or regional grid combined # with the specified halo lies completely within the parent tile. 
If @@ -320,104 +310,167 @@ mkdir_vrfy -p "$tmpdir" # #----------------------------------------------------------------------- # -print_info_msg_verbose "Starting grid file generation..." +print_info_msg "$VERBOSE" " +Starting grid file generation..." +tile_rgnl=7 +res="" +# +#----------------------------------------------------------------------- +# +# Consider a GFDLgrid-type of grid. +# +#----------------------------------------------------------------------- +# if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - $USHDIR/$grid_gen_scr \ - $RES \ + $USHDIR/${grid_gen_scr} \ + ${GFDLgrid_RES} \ $tmpdir \ - ${stretch_fac} ${lon_ctr_T6} ${lat_ctr_T6} ${refine_ratio} \ - ${istart_rgnl_wide_halo_T6SG} ${jstart_rgnl_wide_halo_T6SG} \ - ${iend_rgnl_wide_halo_T6SG} ${jend_rgnl_wide_halo_T6SG} \ + ${STRETCH_FAC} ${LON_CTR} ${LAT_CTR} ${GFDLgrid_REFINE_RATIO} \ + ${ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} \ + ${JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} \ + ${IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} \ + ${JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} \ 1 $USHDIR || \ - print_err_msg_exit "${script_name}" "\ -Call to script that generates grid files returned with nonzero exit code." + print_err_msg_exit "\ +Call to script that generates grid files returned with nonzero exit +code." - tile_rgnl=7 - grid_fp="$tmpdir/${CRES}_grid.tile${tile_rgnl}.nc" - $EXECDIR/global_equiv_resol "${grid_fp}" || \ - print_err_msg_exit "${script_name}" "\ -Call to executable that calculates equivalent global uniform cubed sphere -resolution returned with nonzero exit code." 
+ if [ "${GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES}" = "TRUE" ]; then - RES_equiv=$( ncdump -h "${grid_fp}" | grep -o ":RES_equiv = [0-9]\+" | grep -o "[0-9]") - RES_equiv=${RES_equiv//$'\n'/} -printf "%s\n" "RES_equiv = $RES_equiv" - CRES_equiv="C${RES_equiv}" -printf "%s\n" "CRES_equiv = $CRES_equiv" + res=${GFDLgrid_RES} + CRES="C$res" -elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then + else + + grid_fp="$tmpdir/C${GFDLgrid_RES}_grid.tile${tile_rgnl}.nc" + $EXECDIR/global_equiv_resol "${grid_fp}" || \ + print_err_msg_exit "\ +Call to executable that calculates equivalent global uniform cubed +sphere resolution returned with nonzero exit code." + + res=$( ncdump -h "${grid_fp}" | grep -o ":RES_equiv = [0-9]\+" | grep -o "[0-9]" ) || \ + print_err_msg_exit "\ +Attempt to extract the equivalent global uniform cubed-sphere grid reso- +lution from the file specified by grid_fp faild: + grid_fp = \"${grid_fp}\"" + res=${res//$'\n'/} + CRES="C$res" + + grid_fp_orig="${grid_fp}" + grid_fp="$tmpdir/${CRES}_grid.tile${tile_rgnl}.nc" + mv_vrfy ${grid_fp_orig} ${grid_fp} + + mosaic_fp_orig="$tmpdir/C${GFDLgrid_RES}_mosaic.nc" + mosaic_fp="$tmpdir/${CRES}_mosaic.nc" + mv_vrfy ${mosaic_fp_orig} ${mosaic_fp} + + fi + +printf "%s\n" "res = $res" +printf "%s\n" "CRES = $CRES" # #----------------------------------------------------------------------- # -# Set the full path to the namelist file for the executable that gene- -# rates a regional grid using Jim Purser's method. Then set parameters -# in that file. +# Consider a JPgrid-type of grid. # #----------------------------------------------------------------------- # - RGNL_GRID_NML_FP="$tmpdir/${RGNL_GRID_NML_FN}" - cp_vrfy ${TEMPLATE_DIR}/${RGNL_GRID_NML_FN} ${RGNL_GRID_NML_FP} - - print_info_msg_verbose "\ -Setting parameters in file: - RGNL_GRID_NML_FP = \"$RGNL_GRID_NML_FP\"" +elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then # -# Set parameters. 
+# Copy the template namelist file for the JPgrid-type grid generation +# code to the temporary subdirectory. Then replace the placeholders in +# that file with actual values. # - set_file_param "$RGNL_GRID_NML_FP" "plon" "$lon_rgnl_ctr" - set_file_param "$RGNL_GRID_NML_FP" "plat" "$lat_rgnl_ctr" - set_file_param "$RGNL_GRID_NML_FP" "delx" "$del_angle_x_SG" - set_file_param "$RGNL_GRID_NML_FP" "dely" "$del_angle_y_SG" - set_file_param "$RGNL_GRID_NML_FP" "lx" "$mns_nx_T7_pls_wide_halo" - set_file_param "$RGNL_GRID_NML_FP" "ly" "$mns_ny_T7_pls_wide_halo" - set_file_param "$RGNL_GRID_NML_FP" "a" "$a_grid_param" - set_file_param "$RGNL_GRID_NML_FP" "k" "$k_grid_param" + rgnl_grid_nml_fp="$tmpdir/${RGNL_GRID_NML_FN}" + cp_vrfy ${TEMPLATE_DIR}/${RGNL_GRID_NML_FN} ${rgnl_grid_nml_fp} + + print_info_msg "$VERBOSE" " +Setting parameters in file: + rgnl_grid_nml_fp = \"${rgnl_grid_nml_fp}\"" + + set_file_param "${rgnl_grid_nml_fp}" "plon" "${LON_CTR}" + set_file_param "${rgnl_grid_nml_fp}" "plat" "${LAT_CTR}" + set_file_param "${rgnl_grid_nml_fp}" "delx" "${DEL_ANGLE_X_SG}" + set_file_param "${rgnl_grid_nml_fp}" "dely" "${DEL_ANGLE_Y_SG}" + set_file_param "${rgnl_grid_nml_fp}" "lx" "${NEG_NX_OF_DOM_WITH_WIDE_HALO}" + set_file_param "${rgnl_grid_nml_fp}" "ly" "${NEG_NY_OF_DOM_WITH_WIDE_HALO}" + set_file_param "${rgnl_grid_nml_fp}" "a" "${JPgrid_ALPHA_PARAM}" + set_file_param "${rgnl_grid_nml_fp}" "k" "${JPgrid_KAPPA_PARAM}" cd_vrfy $tmpdir - $EXECDIR/regional_grid ${RGNL_GRID_NML_FP} || \ - print_err_msg_exit "${script_name}" "\ -Call to executable that generates grid file (Jim Purser version) returned -with nonzero exit code." + $EXECDIR/regional_grid ${rgnl_grid_nml_fp} || \ + print_err_msg_exit "\ +Call to executable that generates grid file (Jim Purser version) re- +turned with nonzero exit code." 
- tile_rgnl=7 grid_fp="$tmpdir/regional_grid.nc" $EXECDIR/global_equiv_resol "${grid_fp}" || \ - print_err_msg_exit "${script_name}" "\ -Call to executable that calculates equivalent global uniform cubed sphere -resolution returned with nonzero exit code." + print_err_msg_exit "\ +Call to executable that calculates equivalent global uniform cubed +sphere resolution returned with nonzero exit code." - RES_equiv=$( ncdump -h "${grid_fp}" | grep -o ":RES_equiv = [0-9]\+" | grep -o "[0-9]" ) # Need error checking here. - RES_equiv=${RES_equiv//$'\n'/} -printf "%s\n" "RES_equiv = $RES_equiv" - CRES_equiv="C${RES_equiv}" -printf "%s\n" "CRES_equiv = $CRES_equiv" + res=$( ncdump -h "${grid_fp}" | grep -o ":RES_equiv = [0-9]\+" | grep -o "[0-9]" ) || \ + print_err_msg_exit "\ +Attempt to extract the equivalent global uniform cubed-sphere grid reso- +lution from the file specified by grid_fp faild: + grid_fp = \"${grid_fp}\"" + res=${res//$'\n'/} + CRES="C$res" grid_fp_orig="${grid_fp}" - grid_fp="$tmpdir/${CRES_equiv}_grid.tile${tile_rgnl}.nc" + grid_fp="$tmpdir/${CRES}_grid.tile${tile_rgnl}.nc" mv_vrfy ${grid_fp_orig} ${grid_fp} - $EXECDIR/mosaic_file $CRES_equiv || \ - print_err_msg_exit "${script_name}" "\ +printf "%s\n" "res = $res" +printf "%s\n" "CRES = $CRES" +# +# Create a grid mosaic file that relates the tiles of the cubed-sphere +# grid. Note that there are no "tiles" in the case of a JPgrid-type +# grid, but this file must nevertheless exist because the forecast mo- +# del code looks for it. +# + $EXECDIR/mosaic_file $CRES || \ + print_err_msg_exit "\ Call to executable that creates a grid mosaic file returned with nonzero exit code." + +fi # -# RES and CRES need to be set here in order for the rest of the script -# (that was originally written for a grid with GRID_GEN_METHOD set to -# "GFDLgrid") to work for a grid with GRID_GEN_METHOD set to "JPgrid". 
+#----------------------------------------------------------------------- # - RES="$RES_equiv" - CRES="$CRES_equiv" - - set_file_param "${SCRIPT_VAR_DEFNS_FP}" "RES" "$RES" - set_file_param "${SCRIPT_VAR_DEFNS_FP}" "CRES" "$CRES" - +# If there are pre-existing orography or climatology files we will be +# using (i.e. if RUN_TASK_MAKE_OROG or RUN_TASK_MAKE_SURF_CLIMO is set +# to "FALSE", in which case RES_IN_FIXSAR_FILENAMES will not be set to a +# null string), check that the grid resolution (res) calculated above +# matches the resolution appearing in the names of the preexisting oro- +# graphy or surface climatology files. +# +#----------------------------------------------------------------------- +# +if [ ! -z "${RES_IN_FIXSAR_FILENAMES}" ]; then + if [ "$res" -ne "${RES_IN_FIXSAR_FILENAMES}" ]; then + print_err_msg_exit "\ +The resolution (res) calculated for the grid does not match the resolu- +tion (RES_IN_FIXSAR_FILENAMES) appearing in the names of the orography +and/or surface climatology files: + res = $res + RES_IN_FIXSAR_FILENAMES = \"${RES_IN_FIXSAR_FILENAMES}\"" + fi fi # #----------------------------------------------------------------------- # +# Set CRES in the variable definitions file. +# +#----------------------------------------------------------------------- +# +set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "\"$CRES\"" +# +#----------------------------------------------------------------------- +# # For clarity, rename the tile 7 grid file such that its new name con- # tains the halo size. Then create a link whose name doesn't contain # the halo size that points to this file. @@ -426,11 +479,12 @@ fi # cd_vrfy $tmpdir mv_vrfy ${CRES}_grid.tile${TILE_RGNL}.nc \ - ${CRES}_grid.tile${TILE_RGNL}.halo${nhw_T7}.nc + ${CRES}_grid.tile${TILE_RGNL}.halo${NHW}.nc mv_vrfy ${CRES}_mosaic.nc ${GRID_DIR} cd_vrfy - -print_info_msg_verbose "Grid file generation complete." +print_info_msg "$VERBOSE" " +Grid file generation complete." 
# #----------------------------------------------------------------------- # @@ -449,37 +503,36 @@ print_info_msg_verbose "Grid file generation complete." # wide halo. This is the input grid file for generating both the grid # file with a 3-cell-wide halo and the one with a 4-cell-wide halo. # -unshaved_fp="$tmpdir/${CRES}_grid.tile${TILE_RGNL}.halo${nhw_T7}.nc" +unshaved_fp="$tmpdir/${CRES}_grid.tile${TILE_RGNL}.halo${NHW}.nc" # # We perform the work in tmpdir, so change location to that directory. # Once it is complete, we move the resultant file from tmpdir to GRID_- # DIR. # -cd_vrfy ${tmpdir} +cd_vrfy $tmpdir # # Create an input namelist file for the shave executable to generate a # grid file with a 3-cell-wide halo from the one with a wide halo. Then # call the shave executable. Finally, move the resultant file to the # GRID_DIR directory. # -print_info_msg_verbose "\ -\"Shaving\" grid file with wide halo to obtain grid file with ${nh3_T7}-cell- -wide halo..." +print_info_msg "$VERBOSE" " +\"Shaving\" grid file with wide halo to obtain grid file with ${NH3}-cell-wide +halo..." -nml_fn="input.shave.grid.halo${nh3_T7}" -shaved_fp="${tmpdir}/${CRES}_grid.tile${TILE_RGNL}.halo${nh3_T7}.nc" +nml_fn="input.shave.grid.halo${NH3}" +shaved_fp="${tmpdir}/${CRES}_grid.tile${TILE_RGNL}.halo${NH3}.nc" printf "%s %s %s %s %s\n" \ - ${nx_T7} ${ny_T7} ${nh3_T7} \"${unshaved_fp}\" \"${shaved_fp}\" \ + $NX $NY ${NH3} \"${unshaved_fp}\" \"${shaved_fp}\" \ > ${nml_fn} $APRUN $EXECDIR/${shave_exec} < ${nml_fn} || \ - print_err_msg_exit "${script_name}" "\ -Call to executable \"${shave_exec}\" to generate a grid file with a ${nh3_T7}- -cell-wide-halo returned with nonzero exit code. The namelist file -nml_fn is in directory tmpdir: +print_err_msg_exit "\ +Call to executable \"${shave_exec}\" to generate a grid file with a ${NH3}-cell-wide +halo returned with nonzero exit code. 
The namelist file nml_fn is in +directory tmpdir: tmpdir = \"${tmpdir}\" - nml_fn = \"${nml_fn}\" -" + nml_fn = \"${nml_fn}\"" mv_vrfy ${shaved_fp} ${GRID_DIR} # # Create an input namelist file for the shave executable to generate an @@ -487,24 +540,23 @@ mv_vrfy ${shaved_fp} ${GRID_DIR} # call the shave executable. Finally, move the resultant file to the # GRID_DIR directory. # -print_info_msg_verbose "\ -\"Shaving\" grid file with wide halo to obtain grid file with ${nh4_T7}-cell- -wide halo..." +print_info_msg "$VERBOSE" " +\"Shaving\" grid file with wide halo to obtain grid file with ${NH4}-cell-wide +halo..." -nml_fn="input.shave.grid.halo${nh4_T7}" -shaved_fp="${tmpdir}/${CRES}_grid.tile${TILE_RGNL}.halo${nh4_T7}.nc" +nml_fn="input.shave.grid.halo${NH4}" +shaved_fp="${tmpdir}/${CRES}_grid.tile${TILE_RGNL}.halo${NH4}.nc" printf "%s %s %s %s %s\n" \ - ${nx_T7} ${ny_T7} ${nh4_T7} \"${unshaved_fp}\" \"${shaved_fp}\" \ + $NX $NY ${NH4} \"${unshaved_fp}\" \"${shaved_fp}\" \ > ${nml_fn} $APRUN $EXECDIR/${shave_exec} < ${nml_fn} || \ - print_err_msg_exit "${script_name}" "\ -Call to executable \"${shave_exec}\" to generate a grid file with a ${nh4_T7}- -cell-wide-halo returned with nonzero exit code. The namelist file -nml_fn is in directory tmpdir: +print_err_msg_exit "\ +Call to executable \"${shave_exec}\" to generate a grid file with a ${NH4}-cell-wide +halo returned with nonzero exit code. The namelist file nml_fn is in +directory tmpdir: tmpdir = \"${tmpdir}\" - nml_fn = \"${nml_fn}\" -" + nml_fn = \"${nml_fn}\"" mv_vrfy ${shaved_fp} ${GRID_DIR} # # Change location back to the directory before tmpdir. @@ -519,13 +571,11 @@ cd_vrfy - # #----------------------------------------------------------------------- # -$USHDIR/link_fix.sh \ - verbose="FALSE" \ - script_var_defns_fp="${SCRIPT_VAR_DEFNS_FP}" \ +link_fix \ + verbose="$VERBOSE" \ file_group="grid" || \ - print_err_msg_exit "\ -Call to script to create links to grid files failed. 
-" +print_err_msg_exit "\ +Call to function to create links to grid files failed." # #----------------------------------------------------------------------- # @@ -533,10 +583,12 @@ Call to script to create links to grid files failed. # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== Grid files with various halo widths generated successfully!!! -Exiting script: \"${script_name}\" + +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_make_ic_lbc0.sh b/scripts/exregional_make_ics.sh similarity index 82% rename from scripts/exregional_make_ic_lbc0.sh rename to scripts/exregional_make_ics.sh index 08b1cced43..4c6a5ca151 100755 --- a/scripts/exregional_make_ic_lbc0.sh +++ b/scripts/exregional_make_ics.sh @@ -3,13 +3,12 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -18,19 +17,31 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. 
+# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + This is the ex-script for the task that generates initial condition (IC), surface, and zeroth hour lateral boundary condition (LBC0) files for FV3 (in NetCDF format). @@ -45,23 +56,25 @@ for FV3 (in NetCDF format). # #----------------------------------------------------------------------- # -valid_args=( "EXTRN_MDL_FNS" "EXTRN_MDL_FILES_DIR" "EXTRN_MDL_CDATE" \ - "WGRIB2_DIR" "APRUN" "ICS_DIR" ) +valid_args=( \ +"EXTRN_MDL_FNS" \ +"EXTRN_MDL_FILES_DIR" \ +"EXTRN_MDL_CDATE" \ +"WGRIB2_DIR" \ +"APRUN" \ +"ICS_DIR" \ +) process_args valid_args "$@" - -# If VERBOSE is set to TRUE, print out what each valid argument has been -# set to. -if [ "$VERBOSE" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ -The arguments to script/function \"${script_name}\" have been set as -follows: -" - for (( i=0; i<$num_valid_args; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" - done -fi +# +#----------------------------------------------------------------------- +# +# For debugging purposes, print out values of arguments passed to this +# script. 
Note that these will be printed out only if VERBOSE is set to +# TRUE. +# +#----------------------------------------------------------------------- +# +print_input_args valid_args # #----------------------------------------------------------------------- # @@ -80,18 +93,20 @@ cd_vrfy $workdir # #----------------------------------------------------------------------- # +phys_suite="" + case "${CCPP_PHYS_SUITE}" in -"GFS") +"FV3_GFS_2017_gfdlmp") phys_suite="GFS" ;; -"GSD") +"FV3_GSD_v0" | "FV3_GSD_SAR") phys_suite="GSD" ;; *) - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Physics-suite-dependent namelist variables have not yet been specified for this physics suite: CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\"" @@ -140,6 +155,11 @@ esac # rain, and water number concentrations -- may be specified at the end # of tracers, and these will be calculated by chgres. # +# internal_GSD: +# Logical variable indicating whether or not to try to read in land sur- +# face model (LSM) variables available in the HRRRX grib2 files created +# after about 2019111500. +# # numsoil_out: # The number of soil layers to include in the output NetCDF file. # @@ -209,6 +229,7 @@ fn_grib2="" input_type="" tracers_input="\"\"" tracers="\"\"" +internal_GSD="" numsoil_out="" geogrid_file_input_grid="" replace_vgtyp="" @@ -217,7 +238,7 @@ replace_vgfrc="" tg3_from_soil="" -case "$EXTRN_MDL_NAME_ICS" in +case "${EXTRN_MDL_NAME_ICS}" in "GSMGFS") @@ -231,6 +252,7 @@ case "$EXTRN_MDL_NAME_ICS" in tracers_input="\"spfh\",\"clwmr\",\"o3mr\"" tracers="\"sphum\",\"liq_wat\",\"o3mr\"" + internal_GSD=".false." numsoil_out="4" replace_vgtyp=".true." replace_sotyp=".true." @@ -242,7 +264,7 @@ case "$EXTRN_MDL_NAME_ICS" in "FV3GFS") - if [ "$FV3GFS_DATA_TYPE" = "nemsio" ]; then + if [ "${FV3GFS_FILE_FMT_ICS}" = "nemsio" ]; then external_model="FV3GFS" @@ -260,9 +282,10 @@ case "$EXTRN_MDL_NAME_ICS" in # external model file type, and physics suite). 
# if [ "${USE_CCPP}" = "TRUE" ]; then - if [ "${CCPP_PHYS_SUITE}" = "GFS" ]; then + if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"" - elif [ "${CCPP_PHYS_SUITE}" = "GSD" ]; then + elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then # For GSD physics, add three additional tracers (the ice, rain and water # number concentrations) that are required for Thompson microphysics. tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\",\"ice_nc\",\"rain_nc\",\"water_nc\"" @@ -275,7 +298,7 @@ case "$EXTRN_MDL_NAME_ICS" in tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"" fi - elif [ "$FV3GFS_DATA_TYPE" = "grib2" ]; then + elif [ "${FV3GFS_FILE_FMT_ICS}" = "grib2" ]; then external_model="GFS" @@ -284,6 +307,7 @@ case "$EXTRN_MDL_NAME_ICS" in fi + internal_GSD=".false." numsoil_out="4" replace_vgtyp=".true." replace_sotyp=".true." @@ -300,15 +324,28 @@ case "$EXTRN_MDL_NAME_ICS" in fn_grib2="${EXTRN_MDL_FNS[0]}" input_type="grib2" + internal_GSD=".false." + cdate_min_HRRRX="2019111500" + if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ + "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ] && \ + [ ${CDATE} -gt ${cdate_min_HRRRX} ]; then + print_info_msg " +Setting the chgres_cube namelist setting \"internal_GSD\" to \".true.\" in +order to read in land surface model (LSM) variables available in the +HRRRX grib2 files created after about \"${cdate_min_HRRRX}\"..." + internal_GSD=".true." 
+ fi + if [ "${USE_CCPP}" = "TRUE" ]; then - if [ "${CCPP_PHYS_SUITE}" = "GFS" ]; then + if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then numsoil_out="4" - elif [ "${CCPP_PHYS_SUITE}" = "GSD" ]; then + elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then numsoil_out="9" fi fi - geogrid_file_input_grid="/scratch2/BMC/det/beck/SAR-FV3/geo_em.d01.nc" # Maybe make this a fix file? + geogrid_file_input_grid="/scratch2/BMC/det/beck/SAR-FV3/geo_em.d01.nc_HRRRX" # Maybe make this a fix file? replace_vgtyp=".false." replace_sotyp=".false." replace_vgfrc=".false." @@ -316,9 +353,34 @@ case "$EXTRN_MDL_NAME_ICS" in ;; +"RAPX") + + external_model="RAP" + + fn_grib2="${EXTRN_MDL_FNS[0]}" + input_type="grib2" + + internal_GSD=".false." + + if [ "${USE_CCPP}" = "TRUE" ]; then + if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then + numsoil_out="4" + elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then + numsoil_out="9" + fi + fi + + geogrid_file_input_grid="/scratch2/BMC/det/beck/SAR-FV3/geo_em.d01.nc_RAPX" # Maybe make this a fix file? + replace_vgtyp=".false." + replace_sotyp=".false." + replace_vgfrc=".false." + tg3_from_soil=".true." 
+ + ;; *) - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ External-model-dependent namelist variables have not yet been specified for this external model: EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\"" @@ -399,11 +461,11 @@ hh="${EXTRN_MDL_CDATE:8:2}" fix_dir_target_grid="${FIXsar}" mosaic_file_target_grid="${FIXsar}/${CRES}_mosaic.nc" orog_dir_target_grid="${FIXsar}" - orog_files_target_grid="${CRES}_oro_data.tile${TILE_RGNL}.halo${nh4_T7}.nc" + orog_files_target_grid="${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4}.nc" vcoord_file_target_grid="${FIXam}/global_hyblev.l65.txt" mosaic_file_input_grid="" orog_dir_input_grid="" - base_install_dir="${SORCDIR}/UFS_UTILS_chgres_grib2" + base_install_dir="${CHGRES_DIR}" wgrib2_path="${WGRIB2_DIR}" data_dir_input_grid="${EXTRN_MDL_FILES_DIR}" atm_files_input_grid="${fn_atm_nemsio}" @@ -416,12 +478,13 @@ hh="${EXTRN_MDL_CDATE:8:2}" convert_sfc=.true. convert_nst=.false. regional=1 - halo_bndy=${nh4_T7} + halo_bndy=${NH4} input_type="${input_type}" external_model="${external_model}" tracers_input=${tracers_input} tracers=${tracers} phys_suite="${phys_suite}" + internal_GSD=${internal_GSD} numsoil_out=${numsoil_out} geogrid_file_input_grid="${geogrid_file_input_grid}" replace_vgtyp=${replace_vgtyp} @@ -430,7 +493,7 @@ hh="${EXTRN_MDL_CDATE:8:2}" tg3_from_soil=${tg3_from_soil} / EOF -} || print_err_msg_exit "${script_name}" "\ +} || print_err_msg_exit "\ \"cat\" command to create a namelist file for chgres_cube to generate ICs, surface fields, and the 0-th hour (initial) LBCs returned with nonzero status." @@ -450,7 +513,7 @@ status." # A similar thing happens in the forecast task. 
# ${APRUN} ${EXECDIR}/chgres_cube.exe || \ -print_err_msg_exit "${script_name}" "\ +print_err_msg_exit "\ Call to executable to generate surface and initial conditions files for the FV3SAR failed: EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" @@ -464,10 +527,10 @@ the FV3SAR failed: #----------------------------------------------------------------------- # mv_vrfy out.atm.tile${TILE_RGNL}.nc \ - ${ICS_DIR}/gfs_data.tile${TILE_RGNL}.halo${nh0_T7}.nc + ${ICS_DIR}/gfs_data.tile${TILE_RGNL}.halo${NH0}.nc mv_vrfy out.sfc.tile${TILE_RGNL}.nc \ - ${ICS_DIR}/sfc_data.tile${TILE_RGNL}.halo${nh0_T7}.nc + ${ICS_DIR}/sfc_data.tile${TILE_RGNL}.halo${NH0}.nc mv_vrfy gfs_ctrl.nc ${ICS_DIR} @@ -479,11 +542,13 @@ mv_vrfy gfs_bndy.nc ${ICS_DIR}/gfs_bndy.tile${TILE_RGNL}.000.nc # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== Initial condition, surface, and zeroth hour lateral boundary condition files (in NetCDF format) for FV3 generated successfully!!! -Exiting script: \"${script_name}\" + +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_make_lbc1_to_lbcn.sh b/scripts/exregional_make_lbcs.sh similarity index 85% rename from scripts/exregional_make_lbc1_to_lbcn.sh rename to scripts/exregional_make_lbcs.sh index 15608e3c18..fe273bbbb1 100755 --- a/scripts/exregional_make_lbc1_to_lbcn.sh +++ b/scripts/exregional_make_lbcs.sh @@ -3,13 +3,12 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. 
$SCRIPT_VAR_DEFNS_FP -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -18,19 +17,31 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + This is the ex-script for the task that generates lateral boundary con- dition (LBC) files (in NetCDF format) for all LBC update hours (except hour zero). @@ -45,23 +56,26 @@ hour zero). 
# #----------------------------------------------------------------------- # -valid_args=("EXTRN_MDL_FNS" "EXTRN_MDL_FILES_DIR" "EXTRN_MDL_CDATE" "WGRIB2_DIR" \ - "APRUN" "LBCS_DIR" "EXTRN_MDL_LBC_UPDATE_FHRS") +valid_args=( \ +"EXTRN_MDL_FNS" \ +"EXTRN_MDL_FILES_DIR" \ +"EXTRN_MDL_CDATE" \ +"WGRIB2_DIR" \ +"APRUN" \ +"LBCS_DIR" \ +"EXTRN_MDL_LBC_UPDATE_FHRS" \ +) process_args valid_args "$@" - -# If VERBOSE is set to TRUE, print out what each valid argument has been -# set to. -if [ "$VERBOSE" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ -The arguments to script/function \"${script_name}\" have been set as -follows: -" - for (( i=0; i<$num_valid_args; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" - done -fi +# +#----------------------------------------------------------------------- +# +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. 
+# +#----------------------------------------------------------------------- +# +print_input_args valid_args # #----------------------------------------------------------------------- # @@ -80,18 +94,20 @@ cd_vrfy $workdir # #----------------------------------------------------------------------- # +phys_suite="" + case "${CCPP_PHYS_SUITE}" in -"GFS") +"FV3_GFS_2017_gfdlmp") phys_suite="GFS" ;; -"GSD") +"FV3_GSD_v0" | "FV3_GSD_SAR") phys_suite="GSD" ;; *) - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Physics-suite-dependent namelist variables have not yet been specified for this physics suite: CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\"" @@ -214,7 +230,7 @@ replace_vgfrc="" tg3_from_soil="" -case "$EXTRN_MDL_NAME_LBCS" in +case "${EXTRN_MDL_NAME_LBCS}" in "GSMGFS") @@ -237,7 +253,7 @@ case "$EXTRN_MDL_NAME_LBCS" in "FV3GFS") - if [ "$FV3GFS_DATA_TYPE" = "nemsio" ]; then + if [ "${FV3GFS_FILE_FMT_LBCS}" = "nemsio" ]; then external_model="FV3GFS" @@ -252,9 +268,10 @@ case "$EXTRN_MDL_NAME_LBCS" in # external model file type, and physics suite). # if [ "${USE_CCPP}" = "TRUE" ]; then - if [ "${CCPP_PHYS_SUITE}" = "GFS" ]; then + if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"" - elif [ "${CCPP_PHYS_SUITE}" = "GSD" ]; then + elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then # For GSD physics, add three additional tracers (the ice, rain and water # number concentrations) that are required for Thompson microphysics. 
tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\",\"ice_nc\",\"rain_nc\",\"water_nc\"" @@ -267,7 +284,7 @@ case "$EXTRN_MDL_NAME_LBCS" in tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"" fi - elif [ "$FV3GFS_DATA_TYPE" = "grib2" ]; then + elif [ "${FV3GFS_FILE_FMT_LBCS}" = "grib2" ]; then external_model="GFS" @@ -292,9 +309,10 @@ case "$EXTRN_MDL_NAME_LBCS" in input_type="grib2" if [ "${USE_CCPP}" = "TRUE" ]; then - if [ "${CCPP_PHYS_SUITE}" = "GFS" ]; then + if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then numsoil_out="4" - elif [ "${CCPP_PHYS_SUITE}" = "GSD" ]; then + elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then numsoil_out="9" fi fi @@ -308,7 +326,7 @@ case "$EXTRN_MDL_NAME_LBCS" in *) - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ External-model-dependent namelist variables have not yet been specified for this external model: EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" @@ -325,7 +343,7 @@ esac #----------------------------------------------------------------------- # num_fhrs="${#EXTRN_MDL_LBC_UPDATE_FHRS[@]}" -for (( i=0; i<$num_fhrs; i++ )); do +for (( i=0; i<${num_fhrs}; i++ )); do # # Get the forecast hour of the external model. 
# @@ -337,14 +355,14 @@ for (( i=0; i<$num_fhrs; i++ )); do fn_atm_nemsio="" fn_grib2="" - case "$EXTRN_MDL_NAME_LBCS" in + case "${EXTRN_MDL_NAME_LBCS}" in "GSMGFS") fn_atm_nemsio="${EXTRN_MDL_FNS[$i]}" ;; "FV3GFS") - if [ "$FV3GFS_DATA_TYPE" = "nemsio" ]; then + if [ "${FV3GFS_FILE_FMT_LBCS}" = "nemsio" ]; then fn_atm_nemsio="${EXTRN_MDL_FNS[$i]}" - elif [ "$FV3GFS_DATA_TYPE" = "grib2" ]; then + elif [ "${FV3GFS_FILE_FMT_LBCS}" = "grib2" ]; then fn_grib2="${EXTRN_MDL_FNS[$i]}" fi ;; @@ -352,7 +370,7 @@ for (( i=0; i<$num_fhrs; i++ )); do fn_grib2="${EXTRN_MDL_FNS[$i]}" ;; *) - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The external model output file name to use in the chgres FORTRAN name- list file has not specified for this external model: EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" @@ -389,11 +407,11 @@ list file has not specified for this external model: fix_dir_target_grid="${FIXsar}" mosaic_file_target_grid="${FIXsar}/${CRES}_mosaic.nc" orog_dir_target_grid="${FIXsar}" - orog_files_target_grid="${CRES}_oro_data.tile7.halo${nh4_T7}.nc" + orog_files_target_grid="${CRES}_oro_data.tile7.halo${NH4}.nc" vcoord_file_target_grid="${FIXam}/global_hyblev.l65.txt" mosaic_file_input_grid="" orog_dir_input_grid="" - base_install_dir="${SORCDIR}/UFS_UTILS_chgres_grib2" + base_install_dir="${CHGRES_DIR}" wgrib2_path="${WGRIB2_DIR}" data_dir_input_grid="${EXTRN_MDL_FILES_DIR}" atm_files_input_grid="${fn_atm_nemsio}" @@ -406,7 +424,7 @@ list file has not specified for this external model: convert_sfc=.false. convert_nst=.false. 
regional=2 - halo_bndy=${nh4_T7} + halo_bndy=${NH4} input_type="${input_type}" external_model="${external_model}" tracers_input=${tracers_input} @@ -414,7 +432,7 @@ list file has not specified for this external model: phys_suite="${phys_suite}" / EOF - } || print_err_msg_exit "${script_name}" "\ + } || print_err_msg_exit "\ \"cat\" command to create a namelist file for chgres_cube to generate LBCs for all boundary update times (except the 0-th forecast hour) returned with nonzero status." @@ -422,7 +440,7 @@ with nonzero status." # Run chgres_cube. # ${APRUN} ${EXECDIR}/chgres_cube.exe || \ - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Call to executable to generate lateral boundary conditions file for the the FV3SAR failed: EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" @@ -445,11 +463,13 @@ done # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== Lateral boundary condition (LBC) files (in NetCDF format) generated suc- cessfully for all LBC update hours (except hour zero)!!! -Exiting script: \"${script_name}\" + +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index 43dd32c421..2495b79deb 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -3,22 +3,20 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. 
$USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # -# Source file containing definitions of mathematical and physical con- -# stants. +# Source other necessary files. # #----------------------------------------------------------------------- # -. ${USHDIR}/constants.sh +. $USHDIR/link_fix.sh # #----------------------------------------------------------------------- # @@ -27,20 +25,32 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Print message indicating entry into script. # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" -This is the ex-script for the task that generates grid files. +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + +This is the ex-script for the task that generates orography files. 
========================================================================" # #----------------------------------------------------------------------- @@ -54,22 +64,16 @@ This is the ex-script for the task that generates grid files. # valid_args=( "WORKDIR_LOCAL" ) process_args valid_args "$@" - -# If VERBOSE is set to TRUE, print out what each valid argument has been -# set to. -if [ "$VERBOSE" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ -The arguments to script/function \"${script_name}\" have been set as -follows: -" - for (( i=0; i<$num_valid_args; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" - done -fi - - +# +#----------------------------------------------------------------------- +# +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. +# +#----------------------------------------------------------------------- +# +print_input_args valid_args # #----------------------------------------------------------------------- # @@ -95,11 +99,10 @@ ufs_utils_ushdir="${UFS_UTILS_DIR}/ush" # #----------------------------------------------------------------------- # -# Set the file names of the scripts to use for generating the grid -# files, the orography files, and for filtering the orography files, -# respectively. Also, set the name of the executable file used to -# "shave" (i.e. remove the halo from) certain grid and orography -# files. The shaving is needed only for the gtype="regional" case. +# Set the file names of the scripts to use for generating the raw oro- +# graphy files and for filtering the latter to obtain filtered orography +# files. Also, set the name of the executable file used to "shave" +# (i.e. remove the halo from) certain orography files. 
# #----------------------------------------------------------------------- # @@ -174,23 +177,10 @@ case $MACHINE in "HERA") -# - { save_shell_opts; set +x; } > /dev/null 2>&1 - - . /apps/lmod/lmod/init/sh - module purge - module load intel/18.0.5.274 - module load netcdf/4.7.0 - module load hdf5/1.10.4 - module list - - { restore_shell_opts; } > /dev/null 2>&1 - - export APRUN="time" - export topo_dir="/scratch1/NCEPDEV/global/glopara/fix/fix_orog" - ulimit -s unlimited ulimit -a + export APRUN="time" + export topo_dir="/scratch1/NCEPDEV/global/glopara/fix/fix_orog" ;; @@ -241,6 +231,15 @@ export exec_dir="$EXECDIR" # #----------------------------------------------------------------------- # +# Extract the resolution from CRES and save it in the local variable +# res. +# +#----------------------------------------------------------------------- +# +res="${CRES:1}" +# +#----------------------------------------------------------------------- +# # Generate an orography file corresponding to tile 7 (the regional do- # main) only. # @@ -249,14 +248,15 @@ export exec_dir="$EXECDIR" # oro.${CRES}.tile7.nc # # and will place it in OROG_DIR. Note that this file will include -# orography for a halo of width nhw_T7 cells around tile 7. The follow- +# orography for a halo of width NHW cells around tile 7. The follow- # ing will also create a work directory called tile7 under OROG_DIR. # This work directory can be removed after the orography file has been # created (it is currently not deleted). # #----------------------------------------------------------------------- # -print_info_msg_verbose "Starting orography file generation..." +print_info_msg "$VERBOSE" " +Starting orography file generation..." 
tmp_dir="${raw_dir}/tmp" @@ -272,7 +272,7 @@ case $MACHINE in # printf "%s\n" "\ ${ufs_utils_ushdir}/${orog_gen_scr} \ -$RES \ +$res \ ${TILE_RGNL} \ ${FIXsar} \ ${raw_dir} \ @@ -288,8 +288,8 @@ ${tmp_dir}" \ "THEIA" | "HERA" | "JET" | "ODIN") ${ufs_utils_ushdir}/${orog_gen_scr} \ - $RES ${TILE_RGNL} ${FIXsar} ${raw_dir} ${UFS_UTILS_DIR} ${topo_dir} ${tmp_dir} || \ - print_err_msg_exit "${script_name}" "\ + $res ${TILE_RGNL} ${FIXsar} ${raw_dir} ${UFS_UTILS_DIR} ${topo_dir} ${tmp_dir} || \ + print_err_msg_exit "\ Call to script that generates raw orography file returned with nonzero exit code." ;; @@ -311,12 +311,13 @@ esac # cd_vrfy ${raw_dir} mv_vrfy oro.${CRES}.tile${TILE_RGNL}.nc \ - oro.${CRES}.tile${TILE_RGNL}.halo${nhw_T7}.nc -ln_vrfy -sf oro.${CRES}.tile${TILE_RGNL}.halo${nhw_T7}.nc \ + oro.${CRES}.tile${TILE_RGNL}.halo${NHW}.nc +ln_vrfy -sf oro.${CRES}.tile${TILE_RGNL}.halo${NHW}.nc \ oro.${CRES}.tile${TILE_RGNL}.nc cd_vrfy - -print_info_msg_verbose "Orography file generation complete." +print_info_msg "$VERBOSE" " +Orography file generation complete." # #----------------------------------------------------------------------- # @@ -324,33 +325,35 @@ print_info_msg_verbose "Orography file generation complete." # #----------------------------------------------------------------------- # -print_info_msg_verbose "Setting orography filtering parameters..." +print_info_msg "$VERBOSE" " +Setting orography filtering parameters..." # Need to fix the following (also above). Then redo to get cell_size_avg. #cd_vrfy ${GRID_DIR} #$SORCDIR/regional_grid/regional_grid $RGNL_GRID_NML_FP $CRES || \ -# print_err_msg_exit "${script_name}" "\ -#Call to script that generates grid file (Jim Purser version) returned with nonzero exit code." -#${CRES}_grid.tile${TILE_RGNL}.halo${nhw_T7}.nc +#print_err_msg_exit "\ +#Call to script that generates grid file (Jim Purser version) returned +#with nonzero exit code." 
+#${CRES}_grid.tile${TILE_RGNL}.halo${NHW}.nc #if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then -# RES_eff=$( bc -l <<< "$RES*$refine_ratio" ) +# res_eff=$( bc -l <<< "$res*${GFDLgrid_REFINE_RATIO}" ) #elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then -# grid_size_eff=$( "($delx + $dely)/2" ) +# grid_size_eff=$( "(${JPgrid_DELX} + ${JPgrid_DELY})/2" ) #echo "grid_size_eff = $grid_size_eff" -# RES_eff=$( bc -l <<< "2*$pi_geom*$radius_Earth/(4*$grid_size_eff)" ) +# res_eff=$( bc -l <<< "2*$pi_geom*$radius_Earth/(4*$grid_size_eff)" ) #fi -#RES_eff=$( printf "%.0f\n" $RES_eff ) +#res_eff=$( printf "%.0f\n" ${res_eff} ) #echo -#echo "RES_eff = $RES_eff" +#echo "res_eff = $res_eff" # This will work for a JPgrid type of grid because for that case, RES # in the variable definitions file gets set to RES_equiv (by the make_- # grid task), but this won't work for a GFDLgrid type of grid because if # the stretch factor is not 1 in that case, RES_equiv will not be the # same as RES (because RES does not account for the stretch factor). -RES_equiv=$RES +RES_equiv=$res # Can also call it the "equivalent" global unstretched resolution. @@ -362,50 +365,50 @@ peak_fac_array=( "1.1" "1.1" "1.05" "1.0" "1.0" "1.0" "1.0") # Need to fix this so that the stderr from a failed call to interpol_to_arbit_CRES # gets sent to the stderr of this script. 
-cd4=$( interpol_to_arbit_CRES "${RES_equiv}" "RES_array" "cd4_array" ) || \ - print_err_msg_exit "${script_name}" "\ -Call to script that interpolated cd4 to the regional grid's equiavlent +var_names=( "cd4" "max_slope" "n_del2_weak" "peak_fac" ) +num_vars=${#var_names[@]} +for (( i=0; i<${num_vars}; i++ )); do + var_name=${var_names[$i]} + eval ${var_name}=$( interpol_to_arbit_CRES "${RES_equiv}" "RES_array" "${var_name}_array" ) || \ + print_err_msg_exit "\ +Call to script that interpolated ${var_name} to the regional grid's equiavlent global cubed-sphere resolution (RES_equiv) failed: - RES_equiv = \"${RES_equiv}\" -" -echo "====>>>> cd4 = $cd4" -# -max_slope=$( interpol_to_arbit_CRES "${RES_equiv}" "RES_array" "max_slope_array" ) -echo "====>>>> max_slope = $max_slope" -# -n_del2_weak=$( interpol_to_arbit_CRES "${RES_equiv}" "RES_array" "n_del2_weak_array" ) -# n_del2_weak is defined to be of integer type in the filter_topo code -# that uses it, so round it to the nearest integer. Otherwise, the code -# might break on some machines/compilers. -n_del2_weak=$( printf "%.0f" ${n_del2_weak} ) # cast to integer, Y. 
Wang -echo "====>>>> n_del2_weak = $n_del2_weak" -# -peak_fac=$( interpol_to_arbit_CRES "${RES_equiv}" "RES_array" "peak_fac_array" ) -echo "====>>>> peak_fac = $peak_fac" -# + RES_equiv = \"${RES_equiv}\"" + var_value=${!var_name} + echo "====>>>> ${var_name} = ${var_value}" +done if [ 0 = 1 ]; then -if [ $RES -eq 48 ]; then - export cd4=0.12; export max_slope=0.12; export n_del2_weak=4; export peak_fac=1.1 -elif [ $RES -eq 96 ]; then - export cd4=0.12; export max_slope=0.12; export n_del2_weak=8; export peak_fac=1.1 -elif [ $RES -eq 192 ]; then - export cd4=0.15; export max_slope=0.12; export n_del2_weak=12; export peak_fac=1.05 -elif [ $RES -eq 384 ]; then - export cd4=0.15; export max_slope=0.12; export n_del2_weak=12; export peak_fac=1.0 -elif [ $RES -eq 768 ]; then - export cd4=0.15; export max_slope=0.12; export n_del2_weak=16; export peak_fac=1.0 -elif [ $RES -eq 1152 ]; then - export cd4=0.15; export max_slope=0.16; export n_del2_weak=20; export peak_fac=1.0 -elif [ $RES -eq 3072 ]; then - export cd4=0.15; export max_slope=0.30; export n_del2_weak=24; export peak_fac=1.0 -else +case "$res" in + 48) + export cd4=0.12; export max_slope=0.12; export n_del2_weak=4; export peak_fac=1.1 + ;; + 96) + export cd4=0.12; export max_slope=0.12; export n_del2_weak=8; export peak_fac=1.1 + ;; + 192) + export cd4=0.15; export max_slope=0.12; export n_del2_weak=12; export peak_fac=1.05 + ;; + 384) + export cd4=0.15; export max_slope=0.12; export n_del2_weak=12; export peak_fac=1.0 + ;; + 768) + export cd4=0.15; export max_slope=0.12; export n_del2_weak=16; export peak_fac=1.0 + ;; + 1152) + export cd4=0.15; export max_slope=0.16; export n_del2_weak=20; export peak_fac=1.0 + ;; + 3072) + export cd4=0.15; export max_slope=0.30; export n_del2_weak=24; export peak_fac=1.0 + ;; + *) # This needs to be fixed - i.e. what to do about regional grids that are # not based on a parent global cubed-sphere grid. 
- export cd4=0.15; export max_slope=0.30; export n_del2_weak=24; export peak_fac=1.0 -fi + export cd4=0.15; export max_slope=0.30; export n_del2_weak=24; export peak_fac=1.0 + ;; +esac fi @@ -413,7 +416,7 @@ fi #----------------------------------------------------------------------- # # Generate a filtered orography file with a wide halo (i.e. with a halo -# width of nhw_T7 cells) for tile 7 from the corresponding raw orography +# width of NHW cells) for tile 7 from the corresponding raw orography # file. # # The following will create a filtered orography file named @@ -432,7 +435,7 @@ fi # mosaic file, the executable replaces the raw orography file # with its filtered counterpart (i.e. it gives the filtered file the # same name as the original raw file). Since in this (i.e. -# gtype="regional") case the mosaic file lists only tile 7, a filtered +# GTYPE="regional") case the mosaic file lists only tile 7, a filtered # orography file is generated only for tile 7. Thus, the grid files for # the first 6 tiles that were created above in GRID_DIR are not used # and thus do not need to be copied from GRID_DIR to filter_dir @@ -441,22 +444,22 @@ fi # #----------------------------------------------------------------------- # -print_info_msg_verbose "Starting filtering of orography..." -echo "gtype = \"$gtype\"" +print_info_msg "$VERBOSE" " +Starting filtering of orography..." # The script below creates absolute symlinks in $filter_dir. That's # probably necessary for NCO but probably better to create relative # links for the community workflow. -# Have to export gtype because it is not one of the arguments to the -# called script. -export gtype +# Have to create and export a new variable named gtype because the +# script called below expects it to be in the environment. 
+export gtype="$GTYPE" ${ufs_utils_ushdir}/${orog_fltr_scr} \ - $RES \ + $res \ ${FIXsar} ${raw_dir} ${filter_dir} \ $cd4 ${peak_fac} ${max_slope} ${n_del2_weak} \ ${ufs_utils_ushdir} || \ - print_err_msg_exit "${script_name}" "\ +print_err_msg_exit "\ Call to script that generates filtered orography file returned with non- zero exit code." # @@ -470,12 +473,13 @@ zero exit code." # cd_vrfy ${filter_dir} mv_vrfy oro.${CRES}.tile${TILE_RGNL}.nc \ - oro.${CRES}.tile${TILE_RGNL}.halo${nhw_T7}.nc -#ln_vrfy -sf oro.${CRES}.tile${TILE_RGNL}.halo${nhw_T7}.nc \ + oro.${CRES}.tile${TILE_RGNL}.halo${NHW}.nc +#ln_vrfy -sf oro.${CRES}.tile${TILE_RGNL}.halo${NHW}.nc \ # oro.${CRES}.tile${TILE_RGNL}.nc cd_vrfy - -print_info_msg_verbose "Filtering of orography complete." +print_info_msg "$VERBOSE" " +Filtering of orography complete." # #----------------------------------------------------------------------- # @@ -495,7 +499,7 @@ print_info_msg_verbose "Filtering of orography complete." # orography file without a halo and the one with a 4-cell-wide halo. # #unshaved_fp="${filter_dir}/oro.${CRES}.tile${TILE_RGNL}.nc" -unshaved_fp="${filter_dir}/oro.${CRES}.tile${TILE_RGNL}.halo${nhw_T7}.nc" +unshaved_fp="${filter_dir}/oro.${CRES}.tile${TILE_RGNL}.halo${NHW}.nc" # # We perform the work in shave_dir, so change location to that directo- # ry. Once it is complete, we move the resultant file from shave_dir to @@ -508,24 +512,23 @@ cd_vrfy ${shave_dir} # call the shave executable. Finally, move the resultant file to the # OROG_DIR directory. # -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" " \"Shaving\" orography file with wide halo to obtain orography file with -${nh0_T7}-cell-wide halo..." +${NH0}-cell-wide halo..." 
-nml_fn="input.shave.orog.halo${nh0_T7}" -shaved_fp="${shave_dir}/${CRES}_oro_data.tile${TILE_RGNL}.halo${nh0_T7}.nc" +nml_fn="input.shave.orog.halo${NH0}" +shaved_fp="${shave_dir}/${CRES}_oro_data.tile${TILE_RGNL}.halo${NH0}.nc" printf "%s %s %s %s %s\n" \ - ${nx_T7} ${ny_T7} ${nh0_T7} \"${unshaved_fp}\" \"${shaved_fp}\" \ + $NX $NY ${NH0} \"${unshaved_fp}\" \"${shaved_fp}\" \ > ${nml_fn} $APRUN $EXECDIR/${shave_exec} < ${nml_fn} || \ - print_err_msg_exit "${script_name}" "\ +print_err_msg_exit "\ Call to \"shave\" executable to generate (filtered) orography file with a 4-cell wide halo returned with nonzero exit code. The namelist file nml_fn is in directory shave_dir: shave_dir = \"${shave_dir}\" - nml_fn = \"${nml_fn}\" -" + nml_fn = \"${nml_fn}\"" mv_vrfy ${shaved_fp} ${OROG_DIR} # # Create an input namelist file for the shave executable to generate an @@ -533,24 +536,23 @@ mv_vrfy ${shaved_fp} ${OROG_DIR} # Then call the shave executable. Finally, move the resultant file to # the OROG_DIR directory. # -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" " \"Shaving\" orography file with wide halo to obtain orography file with -${nh4_T7}-cell-wide halo..." +${NH4}-cell-wide halo..." -nml_fn="input.shave.orog.halo${nh4_T7}" -shaved_fp="${shave_dir}/${CRES}_oro_data.tile${TILE_RGNL}.halo${nh4_T7}.nc" +nml_fn="input.shave.orog.halo${NH4}" +shaved_fp="${shave_dir}/${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4}.nc" printf "%s %s %s %s %s\n" \ - ${nx_T7} ${ny_T7} ${nh4_T7} \"${unshaved_fp}\" \"${shaved_fp}\" \ + $NX $NY ${NH4} \"${unshaved_fp}\" \"${shaved_fp}\" \ > ${nml_fn} $APRUN $EXECDIR/${shave_exec} < ${nml_fn} || \ - print_err_msg_exit "${script_name}" "\ +print_err_msg_exit "\ Call to \"shave\" executable to generate (filtered) orography file with a 4-cell wide halo returned with nonzero exit code. 
The namelist file nml_fn is in directory shave_dir: shave_dir = \"${shave_dir}\" - nml_fn = \"${nml_fn}\" -" + nml_fn = \"${nml_fn}\"" mv_vrfy ${shaved_fp} ${OROG_DIR} # # Change location back to the directory before shave_dir. @@ -568,25 +570,22 @@ cd_vrfy - # #----------------------------------------------------------------------- # - -$USHDIR/link_fix.sh \ - verbose="FALSE" \ - script_var_defns_fp="${SCRIPT_VAR_DEFNS_FP}" \ +link_fix \ + verbose="$VERBOSE" \ file_group="orog" || \ - print_err_msg_exit "\ -Call to script to create links to orography files failed. -" +print_err_msg_exit "\ +Call to function to create links to orography files failed." # Moved the following to exregional_make_sfc_climo.sh script since it # needs to be done only if the make_sfc_climo task is run. -#print_info_msg_verbose "\ +#print_info_msg "$VERBOSE" " #Creating links needed by the make_sfc_climo task to the 4-halo grid and #orography files..." # if [ 0 = 1 ]; then cd_vrfy ${OROG_DIR} -ln_vrfy -sf ${CRES}_oro_data.tile${TILE_RGNL}.halo${nh4_T7}.nc \ +ln_vrfy -sf ${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4}.nc \ ${CRES}_oro_data.tile${TILE_RGNL}.nc fi @@ -603,13 +602,13 @@ fi if [ 0 = 1 ]; then cd_vrfy ${FIXsar} -filename="${CRES}_oro_data.tile${TILE_RGNL}.halo${nh0_T7}.nc" +filename="${CRES}_oro_data.tile${TILE_RGNL}.halo${NH0}.nc" ln_vrfy --relative -sf ${OROG_DIR}/$filename $FIXsar ln_vrfy -sf $filename oro_data.nc -filename="${CRES}_oro_data.tile${TILE_RGNL}.halo${nh4_T7}.nc" +filename="${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4}.nc" ln_vrfy --relative -sf ${OROG_DIR}/$filename $FIXsar -ln_vrfy -sf $filename oro_data.tile${TILE_RGNL}.halo${nh4_T7}.nc +ln_vrfy -sf $filename oro_data.tile${TILE_RGNL}.halo${NH4}.nc ln_vrfy -sf $filename oro_data.tile${TILE_RGNL}.nc fi # @@ -619,10 +618,12 @@ fi # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== 
Orography files with various halo widths generated successfully!!! -Exiting script: \"${script_name}\" + +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index ffe1f2b9f6..9dcfe54864 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -3,13 +3,20 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh +# +#----------------------------------------------------------------------- +# +# Source other necessary files. +# +#----------------------------------------------------------------------- +# +. $USHDIR/link_fix.sh # #----------------------------------------------------------------------- # @@ -22,15 +29,27 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). 
# #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + This is the ex-script for the task that generates surface fields from climatology. ========================================================================" @@ -46,20 +65,16 @@ climatology. # valid_args=( "workdir" ) process_args valid_args "$@" - -# If VERBOSE is set to TRUE, print out what each valid argument has been -# set to. -if [ "$VERBOSE" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ -The arguments to script/function \"${script_name}\" have been set as -follows: -" - for (( i=0; i<$num_valid_args; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" - done -fi +# +#----------------------------------------------------------------------- +# +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. 
+# +#----------------------------------------------------------------------- +# +print_input_args valid_args # #----------------------------------------------------------------------- # @@ -97,8 +112,8 @@ input_vegetation_type_file="${SFC_CLIMO_INPUT_DIR}/vegetation_type.igbp.0.05.nc" input_vegetation_greenness_file="${SFC_CLIMO_INPUT_DIR}/vegetation_greenness.0.144.nc" mosaic_file_mdl="${FIXsar}/${CRES}_mosaic.nc" orog_dir_mdl="${FIXsar}" -orog_files_mdl=${CRES}_oro_data.tile${TILE_RGNL}.halo${nh4_T7}.nc -halo=${nh4_T7} +orog_files_mdl=${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4}.nc +halo=${NH4} maximum_snow_albedo_method="bilinear" snowfree_albedo_method="bilinear" vegetation_greenness_method="bilinear" @@ -115,40 +130,30 @@ case $MACHINE in "WCOSS_C") # This could be wrong. Just a guess since I don't have access to this machine. - APRUN_SFC=${APRUN_SFC:-"aprun -j 1 -n 6 -N 6"} + APRUN=${APRUN:-"aprun -j 1 -n 6 -N 6"} ;; "WCOSS") # This could be wrong. Just a guess since I don't have access to this machine. - APRUN_SFC=${APRUN_SFC:-"aprun -j 1 -n 6 -N 6"} + APRUN=${APRUN:-"aprun -j 1 -n 6 -N 6"} ;; "THEIA") # Need to load intel/15.1.133. This and all other module loads should go into a module file. 
module load intel/15.1.133 module list - APRUN_SFC="mpirun -np ${SLURM_NTASKS}" + APRUN="mpirun -np ${SLURM_NTASKS}" ;; "HERA") - module purge - module load intel/18.0.5.274 - module load impi/2018.0.4 - module load netcdf/4.6.1 - #module use /scratch1/NCEPDEV/nems/emc.nemspara/soft/modulefiles - export NCEPLIBS=/scratch1/NCEPDEV/global/gwv/l819/lib - module use -a $NCEPLIBS/modulefiles - module load esmflocal/8_0_48b.netcdf47 - #module load esmf/7.1.0r - module list - APRUN_SFC="srun" + APRUN="srun" ;; *) - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Run command has not been specified for this machine: MACHINE = \"$MACHINE\" - APRUN_SFC = \"$APRUN_SFC\"" + APRUN = \"$APRUN\"" ;; esac @@ -159,7 +164,7 @@ esac # #----------------------------------------------------------------------- # -$APRUN_SFC ${EXECDIR}/sfc_climo_gen || print_err_msg_exit "${script_name}" "\ +$APRUN ${EXECDIR}/sfc_climo_gen || print_err_msg_exit "\ Call to executable that generates surface climatology files returned with nonzero exit code." # @@ -169,7 +174,7 @@ with nonzero exit code." # #----------------------------------------------------------------------- # -case "$gtype" in +case "$GTYPE" in # # Consider, global, stetched, and nested grids. 
@@ -201,7 +206,7 @@ case "$gtype" in for fn in *.halo.nc; do if [ -f $fn ]; then bn="${fn%.halo.nc}" - mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${nh4_T7}.nc + mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH4}.nc fi done # @@ -214,7 +219,7 @@ case "$gtype" in for fn in *.nc; do if [ -f $fn ]; then bn="${fn%.nc}" - mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${nh0_T7}.nc + mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH0}.nc fi done ;; @@ -229,13 +234,11 @@ esac # #----------------------------------------------------------------------- # -$USHDIR/link_fix.sh \ - verbose="FALSE" \ - script_var_defns_fp="${SCRIPT_VAR_DEFNS_FP}" \ +link_fix \ + verbose="$VERBOSE" \ file_group="sfc_climo" || \ - print_err_msg_exit "\ -Call to script to create links to surface climatology files failed. -" +print_err_msg_exit "\ +Call to function to create links to surface climatology files failed." # #----------------------------------------------------------------------- # @@ -255,10 +258,12 @@ touch "make_sfc_climo_files_task_complete.txt" # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== All surface climatology files generated successfully!!! 
-Exiting script: \"${script_name}\" + +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_run_fv3.sh b/scripts/exregional_run_fcst.sh similarity index 60% rename from scripts/exregional_run_fv3.sh rename to scripts/exregional_run_fcst.sh index 6cdb62e1dc..2c34ec2f75 100755 --- a/scripts/exregional_run_fv3.sh +++ b/scripts/exregional_run_fcst.sh @@ -3,13 +3,12 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -22,15 +21,27 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. 
+# +#----------------------------------------------------------------------- +# +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + This is the ex-script for the task that runs a forecast with FV3 for the specified cycle. ========================================================================" @@ -46,20 +57,16 @@ specified cycle. # valid_args=( "CYCLE_DIR" ) process_args valid_args "$@" - -# If VERBOSE is set to TRUE, print out what each valid argument has been -# set to. -if [ "$VERBOSE" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ -The arguments to script/function \"${script_name}\" have been set as -follows: -" - for (( i=0; i<$num_valid_args; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" - done -fi +# +#----------------------------------------------------------------------- +# +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. +# +#----------------------------------------------------------------------- +# +print_input_args valid_args # #----------------------------------------------------------------------- # @@ -98,7 +105,7 @@ case $MACHINE in ulimit -s unlimited ulimit -a - APRUN="mpirun -l -np $PE_MEMBER01" + APRUN="mpirun -l -np ${PE_MEMBER01}" ;; # "THEIA") @@ -136,37 +143,10 @@ case $MACHINE in ;; # "HERA") -# - - if [ "${USE_CCPP}" = "TRUE" ]; then - -# Need to change to the experiment directory to correctly load necessary -# modules for CCPP-version of FV3SAR in lines below - cd_vrfy ${EXPTDIR} - - set +x - source ./module-setup.sh - module use $( pwd -P ) - module load modules.fv3 - module list - set -x - - else - - . 
/apps/lmod/lmod/init/sh - module purge - module use /scratch1/NCEPDEV/nems/emc.nemspara/soft/modulefiles - module load intel/18.0.5.274 - module load impi/2018.0.4 - module load netcdf/4.6.1 - module load pnetcdf/1.10.0 - module list - - fi - ulimit -s unlimited ulimit -a APRUN="srun" + LD_LIBRARY_PATH="${UFS_WTHR_MDL_DIR}/FV3/ccpp/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}" ;; # "JET") @@ -194,28 +174,19 @@ case $MACHINE in ulimit -s unlimited ulimit -a - APRUN="srun -n $PE_MEMBER01" + APRUN="srun -n ${PE_MEMBER01}" ;; # esac # #----------------------------------------------------------------------- # -# Change location to the INPUT subdirectory of the current cycle's run -# directory. -# -#----------------------------------------------------------------------- -# -#cd_vrfy ${CYCLE_DIR}/INPUT -# -#----------------------------------------------------------------------- -# # Create links in the INPUT subdirectory of the current cycle's run di- # rectory to the grid and (filtered) orography files. # #----------------------------------------------------------------------- # -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" " Creating links in the INPUT subdirectory of the current cycle's run di- rectory to the grid and (filtered) orography files ..." @@ -232,20 +203,28 @@ fi # Symlink to mosaic file with a completely different name. target="${FIXsar}/${CRES}_mosaic.nc" +symlink="grid_spec.nc" if [ -f "${target}" ]; then - ln_vrfy -sf ${relative_or_null} $target grid_spec.nc + ln_vrfy -sf ${relative_or_null} $target $symlink else - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Cannot create symlink because target does not exist: target = \"$target}\"" fi # Symlink to halo-3 grid file with "halo4" stripped from name. 
-target="${FIXsar}/${CRES}_grid.tile${TILE_RGNL}.halo${nh3_T7}.nc" +target="${FIXsar}/${CRES}_grid.tile${TILE_RGNL}.halo${NH3}.nc" +if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "TRUE" ] && \ + [ "${GRID_GEN_METHOD}" = "GFDLgrid" ] && \ + [ "${GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES}" = "FALSE" ]; then + symlink="C${GFDLgrid_RES}_grid.tile${TILE_RGNL}.nc" +else + symlink="${CRES}_grid.tile${TILE_RGNL}.nc" +fi if [ -f "${target}" ]; then - ln_vrfy -sf ${relative_or_null} $target ${CRES}_grid.tile${TILE_RGNL}.nc + ln_vrfy -sf ${relative_or_null} $target $symlink else - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Cannot create symlink because target does not exist: target = \"$target}\"" fi @@ -262,11 +241,12 @@ fi # Note that even though the message says "Stopped", the task still con- # sumes core-hours. # -target="${FIXsar}/${CRES}_grid.tile${TILE_RGNL}.halo${nh4_T7}.nc" +target="${FIXsar}/${CRES}_grid.tile${TILE_RGNL}.halo${NH4}.nc" +symlink="grid.tile${TILE_RGNL}.halo${NH4}.nc" if [ -f "${target}" ]; then - ln_vrfy -sf $target ${relative_or_null} grid.tile${TILE_RGNL}.halo${nh4_T7}.nc + ln_vrfy -sf ${relative_or_null} $target $symlink else - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Cannot create symlink because target does not exist: target = \"$target}\"" fi @@ -279,11 +259,12 @@ if [ "${RUN_TASK_MAKE_OROG}" = "TRUE" ]; then fi # Symlink to halo-0 orography file with "${CRES}_" and "halo0" stripped from name. 
-target="${FIXsar}/${CRES}_oro_data.tile${TILE_RGNL}.halo${nh0_T7}.nc" +target="${FIXsar}/${CRES}_oro_data.tile${TILE_RGNL}.halo${NH0}.nc" +symlink="oro_data.nc" if [ -f "${target}" ]; then - ln_vrfy -sf ${relative_or_null} $target oro_data.nc + ln_vrfy -sf ${relative_or_null} $target $symlink else - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Cannot create symlink because target does not exist: target = \"$target}\"" fi @@ -301,11 +282,12 @@ fi # Note that even though the message says "Stopped", the task still con- # sumes core-hours. # -target="${FIXsar}/${CRES}_oro_data.tile${TILE_RGNL}.halo${nh4_T7}.nc" +target="${FIXsar}/${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4}.nc" +symlink="oro_data.tile${TILE_RGNL}.halo${NH4}.nc" if [ -f "${target}" ]; then - ln_vrfy -sf $target ${relative_or_null} oro_data.tile${TILE_RGNL}.halo${nh4_T7}.nc + ln_vrfy -sf ${relative_or_null} $target $symlink else - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Cannot create symlink because target does not exist: target = \"$target}\"" fi @@ -328,13 +310,35 @@ fi # #----------------------------------------------------------------------- # -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" " Creating links with names that FV3 looks for in the INPUT subdirectory of the current cycle's run directory (CYCLE_DIR)..." 
cd_vrfy ${CYCLE_DIR}/INPUT -ln_vrfy -sf gfs_data.tile${TILE_RGNL}.halo${nh0_T7}.nc gfs_data.nc -ln_vrfy -sf sfc_data.tile${TILE_RGNL}.halo${nh0_T7}.nc sfc_data.nc +#ln_vrfy -sf gfs_data.tile${TILE_RGNL}.halo${NH0}.nc gfs_data.nc +#ln_vrfy -sf sfc_data.tile${TILE_RGNL}.halo${NH0}.nc sfc_data.nc + +relative_or_null="" + +target="gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" +symlink="gfs_data.nc" +if [ -f "${target}" ]; then + ln_vrfy -sf ${relative_or_null} $target $symlink +else + print_err_msg_exit "\ +Cannot create symlink because target does not exist: + target = \"$target}\"" +fi + +target="sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" +symlink="sfc_data.nc" +if [ -f "${target}" ]; then + ln_vrfy -sf ${relative_or_null} $target $symlink +else + print_err_msg_exit "\ +Cannot create symlink because target does not exist: + target = \"$target}\"" +fi # #----------------------------------------------------------------------- # @@ -345,36 +349,41 @@ ln_vrfy -sf sfc_data.tile${TILE_RGNL}.halo${nh0_T7}.nc sfc_data.nc # cd_vrfy ${CYCLE_DIR} -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" " Creating links in the current cycle's run directory to static (fix) files in the FIXam directory..." # # If running in "nco" mode, FIXam is simply a symlink under the workflow # directory that points to the system directory containing the fix # files. The files in this system directory are named as listed in the -# FIXam_FILES_SYSDIR array. Thus, that is the array to use to form the -# names of the link targets, but the names of the symlinks themselves -# must be as specified in the FIXam_FILES_EXPTDIR array (because that +# FIXgsm_FILENAMES array. Thus, that is the array to use to form the +# names of the targets of the symlinks, but the names of the symlinks themselves +# must be as specified in the FIXam_FILENAMES array (because that # array contains the file names that FV3 looks for). 
# if [ "${RUN_ENVIR}" = "nco" ]; then for (( i=0; i<${NUM_FIXam_FILES}; i++ )); do - ln_vrfy -sf $FIXam/${FIXam_FILES_SYSDIR[$i]} ${CYCLE_DIR}/${FIXam_FILES_EXPTDIR[$i]} +# Note: Can link directly to files in FIXgsm without needing a local +# FIXam directory, i.e. use +# ln_vrfy -sf $FIXgsm/${FIXgsm_FILENAMES[$i]} \ +# ${CYCLE_DIR}/${FIXam_FILENAMES[$i]} + ln_vrfy -sf $FIXam/${FIXgsm_FILENAMES[$i]} \ + ${CYCLE_DIR}/${FIXam_FILENAMES[$i]} done # # If not running in "nco" mode, FIXam is an actual directory (not a sym- # link) in the experiment directory that contains the same files as the -# system fix directory except that the files have renamed to the file -# names that FV3 looks for. Thus, when creating links to the files in -# this directory, both the target and symlink names should be the ones -# specified in the FIXam_FILES_EXPTDIR array (because that array con- -# tains the file names that FV3 looks for). +# system fix directory except that the files have been renamed to the +# file names that FV3 looks for. Thus, when creating links to the files +# in this directory, both the target and symlink names should be the +# ones specified in the FIXam_FILENAMES array (because that array +# contains the file names that FV3 looks for). # else for (( i=0; i<${NUM_FIXam_FILES}; i++ )); do - ln_vrfy -sf --relative $FIXam/${FIXam_FILES_EXPTDIR[$i]} ${CYCLE_DIR} + ln_vrfy -sf --relative $FIXam/${FIXam_FILENAMES[$i]} ${CYCLE_DIR} done fi @@ -396,26 +405,24 @@ rm_vrfy -f time_stamp.out # #----------------------------------------------------------------------- # -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" " Creating links in the current cycle's run directory to cycle-independent model input files in the main experiment directory..." 
-ln_vrfy -sf -t ${CYCLE_DIR} $EXPTDIR/${FV3_NML_FN} -ln_vrfy -sf -t ${CYCLE_DIR} $EXPTDIR/${DATA_TABLE_FN} -ln_vrfy -sf -t ${CYCLE_DIR} $EXPTDIR/${FIELD_TABLE_FN} -ln_vrfy -sf -t ${CYCLE_DIR} $EXPTDIR/${NEMS_CONFIG_FN} +ln_vrfy -sf -t ${CYCLE_DIR} ${DATA_TABLE_FP} +ln_vrfy -sf -t ${CYCLE_DIR} ${FIELD_TABLE_FP} +ln_vrfy -sf -t ${CYCLE_DIR} ${FV3_NML_FP} +ln_vrfy -sf -t ${CYCLE_DIR} ${NEMS_CONFIG_FP} if [ "${USE_CCPP}" = "TRUE" ]; then - ln_vrfy -sf -t ${CYCLE_DIR} $EXPTDIR/module-setup.sh - ln_vrfy -sf -t ${CYCLE_DIR} $EXPTDIR/modules.fv3 - if [ "${CCPP_PHYS_SUITE}" = "GSD" ]; then - ln_vrfy -sf -t ${CYCLE_DIR} $EXPTDIR/suite_FV3_GSD_v0.xml - elif [ "${CCPP_PHYS_SUITE}" = "GFS" ]; then - ln_vrfy -sf -t ${CYCLE_DIR} $EXPTDIR/suite_FV3_GFS_2017_gfdlmp.xml - fi - if [ "${CCPP_PHYS_SUITE}" = "GSD" ]; then + + ln_vrfy -sf -t ${CYCLE_DIR} ${CCPP_PHYS_SUITE_FP} + + if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then ln_vrfy -sf -t ${CYCLE_DIR} $EXPTDIR/CCN_ACTIVATE.BIN fi + fi # #----------------------------------------------------------------------- @@ -425,21 +432,21 @@ fi # #----------------------------------------------------------------------- # -print_info_msg_verbose "\ -Copying cycle-independent model input files from the templates directory +print_info_msg "$VERBOSE" " +Copying cycle-dependent model input files from the templates directory to the current cycle's run directory..." -cp_vrfy ${TEMPLATE_DIR}/${MODEL_CONFIG_FN} ${CYCLE_DIR} +print_info_msg "$VERBOSE" " + Copying the template diagnostics table file to the current cycle's run + directory..." 
+diag_table_fp="${CYCLE_DIR}/${DIAG_TABLE_FN}" +cp_vrfy "${DIAG_TABLE_TMPL_FP}" "${diag_table_fp}" -if [ "${USE_CCPP}" = "TRUE" ]; then - if [ "${CCPP_PHYS_SUITE}" = "GFS" ]; then - cp_vrfy ${TEMPLATE_DIR}/${DIAG_TABLE_FN} ${CYCLE_DIR} - elif [ "${CCPP_PHYS_SUITE}" = "GSD" ]; then - cp_vrfy ${TEMPLATE_DIR}/${DIAG_TABLE_CCPP_GSD_FN} ${CYCLE_DIR}/${DIAG_TABLE_FN} - fi -elif [ "${USE_CCPP}" = "false" ]; then - cp_vrfy ${TEMPLATE_DIR}/${DIAG_TABLE_FN} ${CYCLE_DIR} -fi +print_info_msg "$VERBOSE" " + Copying the template model configuration file to the current cycle's + run directory..." +model_config_fp="${CYCLE_DIR}/${MODEL_CONFIG_FN}" +cp_vrfy "${MODEL_CONFIG_TMPL_FP}" "${model_config_fp}" # #----------------------------------------------------------------------- # @@ -456,39 +463,54 @@ YYYYMMDD=${CDATE:0:8} # #----------------------------------------------------------------------- # -# Set the full path to the model configuration file. Then set parame- -# ters in that file. +# Set parameters in the diagnostics table file. # #----------------------------------------------------------------------- # -MODEL_CONFIG_FP="${CYCLE_DIR}/${MODEL_CONFIG_FN}" +print_info_msg "$VERBOSE" " +Setting parameters in file: + diag_table_fp = \"${diag_table_fp}\"" -print_info_msg_verbose "\ +set_file_param "${diag_table_fp}" "CRES" "$CRES" +set_file_param "${diag_table_fp}" "YYYY" "$YYYY" +set_file_param "${diag_table_fp}" "MM" "$MM" +set_file_param "${diag_table_fp}" "DD" "$DD" +set_file_param "${diag_table_fp}" "HH" "$HH" +set_file_param "${diag_table_fp}" "YYYYMMDD" "$YYYYMMDD" +# +#----------------------------------------------------------------------- +# +# Set parameters in the model configuration file. +# +#----------------------------------------------------------------------- +# +print_info_msg "$VERBOSE" " Setting parameters in file: - MODEL_CONFIG_FP = \"$MODEL_CONFIG_FP\"" + model_config_fp = \"${model_config_fp}\"" dot_quilting_dot="."${QUILTING,,}"." 
+dot_print_esmf_dot="."${PRINT_ESMF,,}"." -set_file_param "$MODEL_CONFIG_FP" "PE_MEMBER01" "$PE_MEMBER01" -set_file_param "$MODEL_CONFIG_FP" "dt_atmos" "$dt_atmos" -set_file_param "$MODEL_CONFIG_FP" "start_year" "$YYYY" -set_file_param "$MODEL_CONFIG_FP" "start_month" "$MM" -set_file_param "$MODEL_CONFIG_FP" "start_day" "$DD" -set_file_param "$MODEL_CONFIG_FP" "start_hour" "$HH" -set_file_param "$MODEL_CONFIG_FP" "nhours_fcst" "${FCST_LEN_HRS}" -set_file_param "$MODEL_CONFIG_FP" "ncores_per_node" "$ncores_per_node" -set_file_param "$MODEL_CONFIG_FP" "quilting" "${dot_quilting_dot}" -set_file_param "$MODEL_CONFIG_FP" "print_esmf" "$print_esmf" +set_file_param "${model_config_fp}" "PE_MEMBER01" "${PE_MEMBER01}" +set_file_param "${model_config_fp}" "dt_atmos" "${DT_ATMOS}" +set_file_param "${model_config_fp}" "start_year" "$YYYY" +set_file_param "${model_config_fp}" "start_month" "$MM" +set_file_param "${model_config_fp}" "start_day" "$DD" +set_file_param "${model_config_fp}" "start_hour" "$HH" +set_file_param "${model_config_fp}" "nhours_fcst" "${FCST_LEN_HRS}" +set_file_param "${model_config_fp}" "ncores_per_node" "${NCORES_PER_NODE}" +set_file_param "${model_config_fp}" "quilting" "${dot_quilting_dot}" +set_file_param "${model_config_fp}" "print_esmf" "${dot_print_esmf_dot}" # #----------------------------------------------------------------------- # # If the write component is to be used, then a set of parameters, in- # cluding those that define the write component's output grid, need to -# be specified in the model configuration file (MODEL_CONFIG_FP). This +# be specified in the model configuration file (model_config_fp). This # is done by appending a template file (in which some write-component # parameters are set to actual values while others are set to placehol- -# ders) to MODEL_CONFIG_FP and then replacing the placeholder values in -# the (new) MODEL_CONFIG_FP file with actual values. 
The full path of +# ders) to model_config_fp and then replacing the placeholder values in +# the (new) model_config_fp file with actual values. The full path of # this template file is specified in the variable WRTCMP_PA RAMS_TEMP- # LATE_FP. # @@ -496,67 +518,52 @@ set_file_param "$MODEL_CONFIG_FP" "print_esmf" "$print_esmf" # if [ "$QUILTING" = "TRUE" ]; then - cat $WRTCMP_PARAMS_TEMPLATE_FP >> $MODEL_CONFIG_FP + cat ${WRTCMP_PARAMS_TMPL_FP} >> ${model_config_fp} - set_file_param "$MODEL_CONFIG_FP" "write_groups" "$WRTCMP_write_groups" - set_file_param "$MODEL_CONFIG_FP" "write_tasks_per_group" "$WRTCMP_write_tasks_per_group" + set_file_param "${model_config_fp}" "write_groups" "$WRTCMP_write_groups" + set_file_param "${model_config_fp}" "write_tasks_per_group" "$WRTCMP_write_tasks_per_group" - set_file_param "$MODEL_CONFIG_FP" "output_grid" "\'$WRTCMP_output_grid\'" - set_file_param "$MODEL_CONFIG_FP" "cen_lon" "$WRTCMP_cen_lon" - set_file_param "$MODEL_CONFIG_FP" "cen_lat" "$WRTCMP_cen_lat" - set_file_param "$MODEL_CONFIG_FP" "lon1" "$WRTCMP_lon_lwr_left" - set_file_param "$MODEL_CONFIG_FP" "lat1" "$WRTCMP_lat_lwr_left" + set_file_param "${model_config_fp}" "output_grid" "\'$WRTCMP_output_grid\'" + set_file_param "${model_config_fp}" "cen_lon" "$WRTCMP_cen_lon" + set_file_param "${model_config_fp}" "cen_lat" "$WRTCMP_cen_lat" + set_file_param "${model_config_fp}" "lon1" "$WRTCMP_lon_lwr_left" + set_file_param "${model_config_fp}" "lat1" "$WRTCMP_lat_lwr_left" if [ "${WRTCMP_output_grid}" = "rotated_latlon" ]; then - set_file_param "$MODEL_CONFIG_FP" "lon2" "$WRTCMP_lon_upr_rght" - set_file_param "$MODEL_CONFIG_FP" "lat2" "$WRTCMP_lat_upr_rght" - set_file_param "$MODEL_CONFIG_FP" "dlon" "$WRTCMP_dlon" - set_file_param "$MODEL_CONFIG_FP" "dlat" "$WRTCMP_dlat" + set_file_param "${model_config_fp}" "lon2" "$WRTCMP_lon_upr_rght" + set_file_param "${model_config_fp}" "lat2" "$WRTCMP_lat_upr_rght" + set_file_param "${model_config_fp}" "dlon" "$WRTCMP_dlon" + 
set_file_param "${model_config_fp}" "dlat" "$WRTCMP_dlat" elif [ "${WRTCMP_output_grid}" = "lambert_conformal" ]; then - set_file_param "$MODEL_CONFIG_FP" "stdlat1" "$WRTCMP_stdlat1" - set_file_param "$MODEL_CONFIG_FP" "stdlat2" "$WRTCMP_stdlat2" - set_file_param "$MODEL_CONFIG_FP" "nx" "$WRTCMP_nx" - set_file_param "$MODEL_CONFIG_FP" "ny" "$WRTCMP_ny" - set_file_param "$MODEL_CONFIG_FP" "dx" "$WRTCMP_dx" - set_file_param "$MODEL_CONFIG_FP" "dy" "$WRTCMP_dy" + set_file_param "${model_config_fp}" "stdlat1" "$WRTCMP_stdlat1" + set_file_param "${model_config_fp}" "stdlat2" "$WRTCMP_stdlat2" + set_file_param "${model_config_fp}" "nx" "$WRTCMP_nx" + set_file_param "${model_config_fp}" "ny" "$WRTCMP_ny" + set_file_param "${model_config_fp}" "dx" "$WRTCMP_dx" + set_file_param "${model_config_fp}" "dy" "$WRTCMP_dy" + elif [ "${WRTCMP_output_grid}" = "regional_latlon" ]; then + set_file_param "${model_config_fp}" "lon2" "$WRTCMP_lon_upr_rght" + set_file_param "${model_config_fp}" "lat2" "$WRTCMP_lat_upr_rght" + set_file_param "${model_config_fp}" "dlon" "$WRTCMP_dlon" + set_file_param "${model_config_fp}" "dlat" "$WRTCMP_dlat" fi fi # #----------------------------------------------------------------------- # -# Set the full path to the file that specifies the fields to output. -# Then set parameters in that file. -# -#----------------------------------------------------------------------- -# -DIAG_TABLE_FP="${CYCLE_DIR}/${DIAG_TABLE_FN}" - -print_info_msg_verbose "\ -Setting parameters in file: - DIAG_TABLE_FP = \"$DIAG_TABLE_FP\"" - -set_file_param "$DIAG_TABLE_FP" "CRES" "$CRES" -set_file_param "$DIAG_TABLE_FP" "YYYY" "$YYYY" -set_file_param "$DIAG_TABLE_FP" "MM" "$MM" -set_file_param "$DIAG_TABLE_FP" "DD" "$DD" -set_file_param "$DIAG_TABLE_FP" "HH" "$HH" -set_file_param "$DIAG_TABLE_FP" "YYYYMMDD" "$YYYYMMDD" -# -#----------------------------------------------------------------------- -# # Copy the FV3SAR executable to the run directory. 
# #----------------------------------------------------------------------- # if [ "${USE_CCPP}" = "TRUE" ]; then - FV3SAR_EXEC="$NEMSfv3gfs_DIR/tests/fv3.exe" + FV3SAR_EXEC="${UFS_WTHR_MDL_DIR}/tests/fv3.exe" else - FV3SAR_EXEC="$NEMSfv3gfs_DIR/tests/fv3_32bit.exe" + FV3SAR_EXEC="${UFS_WTHR_MDL_DIR}/tests/fv3_32bit.exe" fi if [ -f $FV3SAR_EXEC ]; then - print_info_msg_verbose "\ + print_info_msg "$VERBOSE" " Copying the FV3SAR executable to the run directory..." cp_vrfy ${FV3SAR_EXEC} ${CYCLE_DIR}/fv3_gfs.x else @@ -587,7 +594,8 @@ export OMP_STACKSIZE=1024m #----------------------------------------------------------------------- # $APRUN ./fv3_gfs.x || print_err_msg_exit "\ -Call to executable to run FV3SAR forecast returned with nonzero exit code." +Call to executable to run FV3SAR forecast returned with nonzero exit +code." # #----------------------------------------------------------------------- # @@ -595,10 +603,12 @@ Call to executable to run FV3SAR forecast returned with nonzero exit code." # #----------------------------------------------------------------------- # -print_info_msg "\ +print_info_msg " ======================================================================== FV3 forecast completed successfully!!! -Exiting script: \"${script_name}\" + +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index 44eb1f0c79..0f278751a5 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -3,13 +3,12 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. 
# #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -22,15 +21,27 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. +# +#----------------------------------------------------------------------- +# +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + This is the ex-script for the task that runs the post-processor (UPP) on the output files corresponding to a specified forecast hour. ========================================================================" @@ -46,20 +57,16 @@ the output files corresponding to a specified forecast hour. # valid_args=( "cycle_dir" "postprd_dir" "fhr_dir" "fhr" ) process_args valid_args "$@" - -# If VERBOSE is set to TRUE, print out what each valid argument has been -# set to. 
-if [ "$VERBOSE" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ -The arguments to script/function \"${script_name}\" have been set as -follows: -" - for (( i=0; i<$num_valid_args; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" - done -fi +# +#----------------------------------------------------------------------- +# +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. +# +#----------------------------------------------------------------------- +# +print_input_args valid_args # #----------------------------------------------------------------------- # @@ -67,7 +74,8 @@ fi # #----------------------------------------------------------------------- # -print_info_msg_verbose "Starting post-processing for fhr = $fhr hr..." +print_info_msg "$VERBOSE" " +Starting post-processing for fhr = $fhr hr..." case $MACHINE in @@ -113,39 +121,7 @@ case $MACHINE in "HERA") - { save_shell_opts; set +x; } > /dev/null 2>&1 - module purge - - module load intel/19.0.4.243 - module load impi/2019.0.4 - -# module use /contrib/modulefiles - module use -a /scratch2/NCEPDEV/nwprod/NCEPLIBS/modulefiles - -# Loading nceplibs modules - module load sigio/2.1.1 - module load jasper/1.900.1 - module load png/1.2.44 - module load z/1.2.11 - module load sfcio/1.1.1 - module load nemsio/2.2.4 - module load bacio/2.0.3 - module load g2/3.1.1 -# module load xmlparse/v2.0.0 - module load gfsio/1.1.0 - module load ip/3.0.2 - module load sp/2.0.3 - module load w3emc/2.3.1 - module load w3nco/2.0.7 - module load crtm/2.2.5 -# module load netcdf/3.6.3 - module load netcdf/4.7.0 - module load g2tmpl/1.5.1 - module load wrfio/1.1.1 - - export NDATE=/scratch3/NCEPDEV/nwprod/lib/prod_util/v1.1.0/exec/ndate - - { restore_shell_opts; } > /dev/null 2>&1 +# export NDATE=/scratch3/NCEPDEV/nwprod/lib/prod_util/v1.1.0/exec/ndate APRUN="srun" ;; @@ -276,15 +252,17 @@ 
else grid_name="${GRID_GEN_METHOD}" if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - stretch_str="S$( printf "%s" "${stretch_fac}" | sed "s|\.|p|" )" - refine_str="RR${refine_ratio}" + stretch_str="S$( printf "%s" "${STRETCH_FAC}" | sed "s|\.|p|" )" + refine_str="RR${GFDLgrid_REFINE_RATIO}" grid_name="${grid_name}_${CRES}_${stretch_str}_${refine_str}" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - nx_T7_str="NX$( printf "%s" "${nx_T7}" | sed "s|\.|p|" )" - ny_T7_str="NY$( printf "%s" "${ny_T7}" | sed "s|\.|p|" )" - a_grid_param_str="A$( printf "%s" "${a_grid_param}" | sed "s|-|mns|" | sed "s|\.|p|" )" - k_grid_param_str="K$( printf "%s" "${k_grid_param}" | sed "s|-|mns|" | sed "s|\.|p|" )" - grid_name="${grid_name}_${nx_T7_str}_${ny_T7_str}_${a_grid_param_str}_${k_grid_param_str}" + nx_str="NX$( printf "%s" "$NX" | sed "s|\.|p|" )" + ny_str="NY$( printf "%s" "$NY" | sed "s|\.|p|" )" + JPgrid_alpha_param_str="A"$( printf "%s" "${JPgrid_ALPHA_PARAM}" | \ + sed "s|-|mns|" | sed "s|\.|p|" ) + JPgrid_kappa_param_str="K"$( printf "%s" "${JPgrid_KAPPA_PARAM}" | \ + sed "s|-|mns|" | sed "s|\.|p|" ) + grid_name="${grid_name}_${nx_str}_${ny_str}_${JPgrid_alpha_param_str}_${JPgrid_kappa_param_str}" fi fi @@ -309,10 +287,12 @@ rm_vrfy -rf ${fhr_dir} # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== Post-processing for forecast hour $fhr completed successfully. 
-Exiting script: \"${script_name}\" + +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/tests/baseline_configs/config.regional_001.sh b/tests/baseline_configs/config.regional_001.sh new file mode 100644 index 0000000000..4158bdaf39 --- /dev/null +++ b/tests/baseline_configs/config.regional_001.sh @@ -0,0 +1,57 @@ +# +# MACHINE will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +MACHINE="" +# +# ACCOUNT will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +ACCOUNT="" +# +# EXPT_SUBDIR will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to a value obtained from the name of this file. +# +EXPT_SUBDIR="" +# +# USE_CRON_TO_RELAUNCH may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. +# +USE_CRON_TO_RELAUNCH="TRUE" +# +# CRON_RELAUNCH_INTVL_MNTS may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. 
+# +CRON_RELAUNCH_INTVL_MNTS="02" + + +QUEUE_DEFAULT="batch" +QUEUE_HPSS="service" +QUEUE_FCST="batch" + +VERBOSE="TRUE" + +RUN_ENVIR="community" +PREEXISTING_DIR_METHOD="rename" + +PREDEF_GRID_NAME="GSD_HRRR25km" +GRID_GEN_METHOD="JPgrid" +QUILTING="TRUE" +USE_CCPP="TRUE" +CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" +FCST_LEN_HRS="06" +LBC_UPDATE_INTVL_HRS="6" + +DATE_FIRST_CYCL="20190520" +DATE_LAST_CYCL="20190520" +CYCL_HRS=( "00" ) + +EXTRN_MDL_NAME_ICS="GSMGFS" +EXTRN_MDL_NAME_LBCS="GSMGFS" + +RUN_TASK_MAKE_GRID="TRUE" +RUN_TASK_MAKE_OROG="TRUE" +RUN_TASK_MAKE_SFC_CLIMO="TRUE" + diff --git a/tests/baseline_configs/config.regional_002.sh b/tests/baseline_configs/config.regional_002.sh new file mode 100644 index 0000000000..a6d75cda08 --- /dev/null +++ b/tests/baseline_configs/config.regional_002.sh @@ -0,0 +1,57 @@ +# +# MACHINE will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +MACHINE="" +# +# ACCOUNT will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +ACCOUNT="" +# +# EXPT_SUBDIR will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to a value obtained from the name of this file. +# +EXPT_SUBDIR="" +# +# USE_CRON_TO_RELAUNCH may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. +# +USE_CRON_TO_RELAUNCH="TRUE" +# +# CRON_RELAUNCH_INTVL_MNTS may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. 
+# +CRON_RELAUNCH_INTVL_MNTS="02" + + +QUEUE_DEFAULT="batch" +QUEUE_HPSS="service" +QUEUE_FCST="batch" + +VERBOSE="TRUE" + +RUN_ENVIR="community" +PREEXISTING_DIR_METHOD="rename" + +PREDEF_GRID_NAME="GSD_HRRR25km" +GRID_GEN_METHOD="JPgrid" +QUILTING="TRUE" +USE_CCPP="TRUE" +CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" +FCST_LEN_HRS="06" +LBC_UPDATE_INTVL_HRS="6" + +DATE_FIRST_CYCL="20190701" +DATE_LAST_CYCL="20190701" +CYCL_HRS=( "00" ) + +EXTRN_MDL_NAME_ICS="FV3GFS" +EXTRN_MDL_NAME_LBCS="FV3GFS" + +RUN_TASK_MAKE_GRID="TRUE" +RUN_TASK_MAKE_OROG="TRUE" +RUN_TASK_MAKE_SFC_CLIMO="TRUE" + diff --git a/tests/baseline_configs/config.regional_003.sh b/tests/baseline_configs/config.regional_003.sh new file mode 100644 index 0000000000..689f5c4c1a --- /dev/null +++ b/tests/baseline_configs/config.regional_003.sh @@ -0,0 +1,57 @@ +# +# MACHINE will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +MACHINE="" +# +# ACCOUNT will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +ACCOUNT="" +# +# EXPT_SUBDIR will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to a value obtained from the name of this file. +# +EXPT_SUBDIR="" +# +# USE_CRON_TO_RELAUNCH may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. +# +USE_CRON_TO_RELAUNCH="TRUE" +# +# CRON_RELAUNCH_INTVL_MNTS may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. 
+# +CRON_RELAUNCH_INTVL_MNTS="02" + + +QUEUE_DEFAULT="batch" +QUEUE_HPSS="service" +QUEUE_FCST="batch" + +VERBOSE="TRUE" + +RUN_ENVIR="community" +PREEXISTING_DIR_METHOD="rename" + +PREDEF_GRID_NAME="GSD_HRRR25km" +GRID_GEN_METHOD="JPgrid" +QUILTING="TRUE" +USE_CCPP="TRUE" +CCPP_PHYS_SUITE="FV3_GSD_v0" +FCST_LEN_HRS="06" +LBC_UPDATE_INTVL_HRS="6" + +DATE_FIRST_CYCL="20190520" +DATE_LAST_CYCL="20190520" +CYCL_HRS=( "00" ) + +EXTRN_MDL_NAME_ICS="HRRRX" +EXTRN_MDL_NAME_LBCS="RAPX" + +RUN_TASK_MAKE_GRID="TRUE" +RUN_TASK_MAKE_OROG="TRUE" +RUN_TASK_MAKE_SFC_CLIMO="TRUE" + diff --git a/tests/baseline_configs/config.regional_004.sh b/tests/baseline_configs/config.regional_004.sh new file mode 100644 index 0000000000..cc75361a69 --- /dev/null +++ b/tests/baseline_configs/config.regional_004.sh @@ -0,0 +1,57 @@ +# +# MACHINE will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +MACHINE="" +# +# ACCOUNT will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +ACCOUNT="" +# +# EXPT_SUBDIR will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to a value obtained from the name of this file. +# +EXPT_SUBDIR="" +# +# USE_CRON_TO_RELAUNCH may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. +# +USE_CRON_TO_RELAUNCH="TRUE" +# +# CRON_RELAUNCH_INTVL_MNTS may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. 
+# +CRON_RELAUNCH_INTVL_MNTS="02" + + +QUEUE_DEFAULT="batch" +QUEUE_HPSS="service" +QUEUE_FCST="batch" + +VERBOSE="TRUE" + +RUN_ENVIR="community" +PREEXISTING_DIR_METHOD="rename" + +PREDEF_GRID_NAME="GSD_HRRR25km" +GRID_GEN_METHOD="JPgrid" +QUILTING="TRUE" +USE_CCPP="TRUE" +CCPP_PHYS_SUITE="FV3_GSD_SAR" +FCST_LEN_HRS="06" +LBC_UPDATE_INTVL_HRS="6" + +DATE_FIRST_CYCL="20190520" +DATE_LAST_CYCL="20190520" +CYCL_HRS=( "00" ) + +EXTRN_MDL_NAME_ICS="HRRRX" +EXTRN_MDL_NAME_LBCS="RAPX" + +RUN_TASK_MAKE_GRID="TRUE" +RUN_TASK_MAKE_OROG="TRUE" +RUN_TASK_MAKE_SFC_CLIMO="TRUE" + diff --git a/tests/baseline_configs/config.regional_005.sh b/tests/baseline_configs/config.regional_005.sh new file mode 100644 index 0000000000..a9544b0940 --- /dev/null +++ b/tests/baseline_configs/config.regional_005.sh @@ -0,0 +1,57 @@ +# +# MACHINE will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +MACHINE="" +# +# ACCOUNT will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +ACCOUNT="" +# +# EXPT_SUBDIR will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to a value obtained from the name of this file. +# +EXPT_SUBDIR="" +# +# USE_CRON_TO_RELAUNCH may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. +# +USE_CRON_TO_RELAUNCH="TRUE" +# +# CRON_RELAUNCH_INTVL_MNTS may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. 
+# +CRON_RELAUNCH_INTVL_MNTS="02" + + +QUEUE_DEFAULT="batch" +QUEUE_HPSS="service" +QUEUE_FCST="batch" + +VERBOSE="TRUE" + +RUN_ENVIR="community" +PREEXISTING_DIR_METHOD="rename" + +PREDEF_GRID_NAME="GSD_HRRR_AK_50km" +GRID_GEN_METHOD="JPgrid" +QUILTING="TRUE" +USE_CCPP="TRUE" +CCPP_PHYS_SUITE="FV3_GSD_SAR" +FCST_LEN_HRS="06" +LBC_UPDATE_INTVL_HRS="6" + +DATE_FIRST_CYCL="20190520" +DATE_LAST_CYCL="20190520" +CYCL_HRS=( "00" ) + +EXTRN_MDL_NAME_ICS="RAPX" +EXTRN_MDL_NAME_LBCS="RAPX" + +RUN_TASK_MAKE_GRID="TRUE" +RUN_TASK_MAKE_OROG="TRUE" +RUN_TASK_MAKE_SFC_CLIMO="TRUE" + diff --git a/tests/baseline_configs/config.regional_006.sh b/tests/baseline_configs/config.regional_006.sh new file mode 100644 index 0000000000..757c94cad7 --- /dev/null +++ b/tests/baseline_configs/config.regional_006.sh @@ -0,0 +1,62 @@ +# +# MACHINE will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +MACHINE="" +# +# ACCOUNT will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +ACCOUNT="" +# +# EXPT_SUBDIR will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to a value obtained from the name of this file. +# +EXPT_SUBDIR="" +# +# USE_CRON_TO_RELAUNCH may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. +# +USE_CRON_TO_RELAUNCH="TRUE" +# +# CRON_RELAUNCH_INTVL_MNTS may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. 
+# +CRON_RELAUNCH_INTVL_MNTS="02" + + +QUEUE_DEFAULT="batch" +QUEUE_HPSS="service" +QUEUE_FCST="batch" + +VERBOSE="TRUE" + +RUN_ENVIR="community" +PREEXISTING_DIR_METHOD="rename" + +PREDEF_GRID_NAME="GSD_HRRR25km" +GRID_GEN_METHOD="JPgrid" +QUILTING="TRUE" +USE_CCPP="TRUE" +CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" +FCST_LEN_HRS="06" +LBC_UPDATE_INTVL_HRS="6" + +DATE_FIRST_CYCL="20190701" +DATE_LAST_CYCL="20190701" +CYCL_HRS=( "00" ) + +EXTRN_MDL_NAME_ICS="FV3GFS" +EXTRN_MDL_NAME_LBCS="FV3GFS" + +RUN_TASK_MAKE_GRID="FALSE" +GRID_DIR="/scratch2/BMC/det/FV3SAR_pregen/grid/GSD_HRRR25km" + +RUN_TASK_MAKE_OROG="FALSE" +OROG_DIR="/scratch2/BMC/det/FV3SAR_pregen/orog/GSD_HRRR25km" + +RUN_TASK_MAKE_SFC_CLIMO="FALSE" +SFC_CLIMO_DIR="/scratch2/BMC/det/FV3SAR_pregen/sfc_climo/GSD_HRRR25km" + diff --git a/tests/baseline_configs/config.regional_007.sh b/tests/baseline_configs/config.regional_007.sh new file mode 100644 index 0000000000..be1698c7e3 --- /dev/null +++ b/tests/baseline_configs/config.regional_007.sh @@ -0,0 +1,103 @@ +# +# MACHINE will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +MACHINE="" +# +# ACCOUNT will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +ACCOUNT="" +# +# EXPT_SUBDIR will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to a value obtained from the name of this file. +# +EXPT_SUBDIR="" +# +# USE_CRON_TO_RELAUNCH may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. +# +USE_CRON_TO_RELAUNCH="TRUE" +# +# CRON_RELAUNCH_INTVL_MNTS may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. 
+# +CRON_RELAUNCH_INTVL_MNTS="02" + + +QUEUE_DEFAULT="batch" +QUEUE_HPSS="service" +QUEUE_FCST="batch" + +VERBOSE="TRUE" + +RUN_ENVIR="community" +PREEXISTING_DIR_METHOD="rename" + +GRID_GEN_METHOD="GFDLgrid" + +GFDLgrid_LON_T6_CTR=-97.5 +GFDLgrid_LAT_T6_CTR=38.5 +GFDLgrid_STRETCH_FAC=1.5 +GFDLgrid_RES="96" +GFDLgrid_REFINE_RATIO=2 + +#num_margin_cells_T6_left=9 +#GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_left + 1 )) +GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G="10" + +#num_margin_cells_T6_right=9 +#GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_right )) +GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G="87" + +#num_margin_cells_T6_bottom=9 +#GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_bottom + 1 )) +GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G="10" + +#num_margin_cells_T6_top=9 +#GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_top )) +GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G="87" + +GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES="TRUE" + +DT_ATMOS="100" + +LAYOUT_X="6" +LAYOUT_Y="6" +BLOCKSIZE="26" + +QUILTING="TRUE" + +if [ "$QUILTING" = "TRUE" ]; then + WRTCMP_write_groups="1" + WRTCMP_write_tasks_per_group=$(( 1*LAYOUT_Y )) + WRTCMP_output_grid="rotated_latlon" + WRTCMP_cen_lon="${GFDLgrid_LON_T6_CTR}" + WRTCMP_cen_lat="${GFDLgrid_LAT_T6_CTR}" +# The following have not been tested... 
+ WRTCMP_lon_lwr_left="-25.0" + WRTCMP_lat_lwr_left="-15.0" + WRTCMP_lon_upr_rght="25.0" + WRTCMP_lat_upr_rght="15.0" + WRTCMP_dlon="0.24" + WRTCMP_dlat="0.24" +fi + +USE_CCPP="TRUE" +CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" +FCST_LEN_HRS="06" +LBC_UPDATE_INTVL_HRS="6" + +DATE_FIRST_CYCL="20190701" +DATE_LAST_CYCL="20190701" +CYCL_HRS=( "00" ) + +EXTRN_MDL_NAME_ICS="FV3GFS" +EXTRN_MDL_NAME_LBCS="FV3GFS" + +RUN_TASK_MAKE_GRID="TRUE" +RUN_TASK_MAKE_OROG="TRUE" +RUN_TASK_MAKE_SFC_CLIMO="TRUE" + diff --git a/tests/baseline_configs/config.regional_008.sh b/tests/baseline_configs/config.regional_008.sh new file mode 100644 index 0000000000..0a3cbbaf46 --- /dev/null +++ b/tests/baseline_configs/config.regional_008.sh @@ -0,0 +1,103 @@ +# +# MACHINE will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +MACHINE="" +# +# ACCOUNT will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +ACCOUNT="" +# +# EXPT_SUBDIR will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to a value obtained from the name of this file. +# +EXPT_SUBDIR="" +# +# USE_CRON_TO_RELAUNCH may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. +# +USE_CRON_TO_RELAUNCH="TRUE" +# +# CRON_RELAUNCH_INTVL_MNTS may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. 
+# +CRON_RELAUNCH_INTVL_MNTS="02" + + +QUEUE_DEFAULT="batch" +QUEUE_HPSS="service" +QUEUE_FCST="batch" + +VERBOSE="TRUE" + +RUN_ENVIR="community" +PREEXISTING_DIR_METHOD="rename" + +GRID_GEN_METHOD="GFDLgrid" + +GFDLgrid_LON_T6_CTR=-97.5 +GFDLgrid_LAT_T6_CTR=38.5 +GFDLgrid_STRETCH_FAC=1.5 +GFDLgrid_RES="96" +GFDLgrid_REFINE_RATIO=2 + +#num_margin_cells_T6_left=9 +#GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_left + 1 )) +GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G="10" + +#num_margin_cells_T6_right=9 +#GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_right )) +GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G="87" + +#num_margin_cells_T6_bottom=9 +#GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_bottom + 1 )) +GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G="10" + +#num_margin_cells_T6_top=9 +#GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_top )) +GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G="87" + +GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES="FALSE" + +DT_ATMOS="100" + +LAYOUT_X="6" +LAYOUT_Y="6" +BLOCKSIZE="26" + +QUILTING="TRUE" + +if [ "$QUILTING" = "TRUE" ]; then + WRTCMP_write_groups="1" + WRTCMP_write_tasks_per_group=$(( 1*LAYOUT_Y )) + WRTCMP_output_grid="rotated_latlon" + WRTCMP_cen_lon="${GFDLgrid_LON_T6_CTR}" + WRTCMP_cen_lat="${GFDLgrid_LAT_T6_CTR}" +# The following have not been tested... 
+ WRTCMP_lon_lwr_left="-25.0" + WRTCMP_lat_lwr_left="-15.0" + WRTCMP_lon_upr_rght="25.0" + WRTCMP_lat_upr_rght="15.0" + WRTCMP_dlon="0.24" + WRTCMP_dlat="0.24" +fi + +USE_CCPP="TRUE" +CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" +FCST_LEN_HRS="06" +LBC_UPDATE_INTVL_HRS="6" + +DATE_FIRST_CYCL="20190701" +DATE_LAST_CYCL="20190701" +CYCL_HRS=( "00" ) + +EXTRN_MDL_NAME_ICS="FV3GFS" +EXTRN_MDL_NAME_LBCS="FV3GFS" + +RUN_TASK_MAKE_GRID="TRUE" +RUN_TASK_MAKE_OROG="TRUE" +RUN_TASK_MAKE_SFC_CLIMO="TRUE" + diff --git a/tests/baselines_list.txt b/tests/baselines_list.txt new file mode 100644 index 0000000000..db0531946c --- /dev/null +++ b/tests/baselines_list.txt @@ -0,0 +1,5 @@ +regional_001 +regional_002 +regional_003 +regional_004 +regional_005 diff --git a/tests/param_arrays.sample.sh b/tests/param_arrays.sample.sh deleted file mode 100644 index e1d43b347b..0000000000 --- a/tests/param_arrays.sample.sh +++ /dev/null @@ -1,7 +0,0 @@ -all_vals_predef_domain=( "HRRR" ) -all_vals_grid_gen_method=( "JPgrid" ) -all_vals_CCPP=( "true" ) -all_vals_phys_suite=( "GSD" ) -all_vals_CDATE=( "2017090700" ) -all_vals_fcst_len_hrs=( "6" ) -all_vals_quilting=( "true" ) diff --git a/tests/run_experiments.sh b/tests/run_experiments.sh new file mode 100755 index 0000000000..95a141c6f2 --- /dev/null +++ b/tests/run_experiments.sh @@ -0,0 +1,471 @@ +#!/bin/bash -l + +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). 
+# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# The current script should be located in the "tests" subdirectory of +# the workflow directory, which we denote by homerrfs. Thus, the work- +# flow directory (homerrfs) is the one above the directory of the cur- +# rent script. Set HOMRErrfs accordingly. +# +#----------------------------------------------------------------------- +# +homerrfs=${scrfunc_dir%/*} +# +#----------------------------------------------------------------------- +# +# Set directories. +# +#----------------------------------------------------------------------- +# +ushdir="$homerrfs/ush" +baseline_configs_dir="$homerrfs/tests/baseline_configs" +# +#----------------------------------------------------------------------- +# +# Source bash utility functions. +# +#----------------------------------------------------------------------- +# +. $ushdir/source_util_funcs.sh +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Specify the set of valid argument names for this script/function. +# Then process the arguments provided to this script/function (which +# should consist of a set of name-value pairs of the form arg1="value1", +# etc). 
+# +#----------------------------------------------------------------------- +# +valid_args=( \ +"expts_file" \ +"machine" \ +"account" \ +"use_cron_to_relaunch" \ +"cron_relaunch_intvl_mnts" \ +) +process_args valid_args "$@" +# +#----------------------------------------------------------------------- +# +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. +# +#----------------------------------------------------------------------- +# +print_input_args valid_args +# +#----------------------------------------------------------------------- +# +# Check arguments. +# +#----------------------------------------------------------------------- +# +if [ 1 = 0 ]; then + if [ "$#" -ne 1 ]; then + + print_err_msg_exit " +Incorrect number of arguments specified: + + Number of arguments specified: $# + +Usage: + + ${scrfunc_fn} expts_file + +where expts_file is the name of the file containing the list of experi- +ments to run. If expts_file is the absolute path to a file, it is used +as is. If it is a relative path (including just a file name), it is as- +sumed to be given relative to the path from which this script is called. +" + + fi +fi +# +#----------------------------------------------------------------------- +# +# Verify that an experiments list file has been specified. If not, +# print out an error message and exit. +# +#----------------------------------------------------------------------- +# +# Note: +# The function process_args() should be modified to look for required +# arguments, which can be denoted by appending to the name of a required +# argument the string "; REQUIRED". It can then check that all required +# arguments are in fact specified in the arguments list. That way, the +# following if-statement will not be needed since process_args() will +# catch the case of missing required arguments. 
+# + if [ -z "${expts_file}" ] || \ + [ -z "${machine}" ] || \ + [ -z "${account}" ]; then + print_err_msg_exit "\ +An experiments list file (expts_file), a machine name (machine), and an +account name (account) must be specified as input arguments to this +script. One or more of these is currently set to an empty string: + expts_file = \"${expts_file}\" + machine = \"${machine}\" + account = \"${account}\" +Use the following format to specify these in the argument list passed to +this script: + ${scrfunc_fn} \\ + expts_file=\"name_of_file_or_full_path_to_file\" \\ + machine=\"name_of_machine_to_run_on\" \\ + account=\"name_of_hpc_account_to_use\" \\ + ..." + fi +# +#----------------------------------------------------------------------- +# +# Get the full path to the experiments list file and verify that it ex- +# ists. +# +#----------------------------------------------------------------------- +# + expts_list_fp=$( readlink -f "${expts_file}" ) + + if [ ! -f "${expts_list_fp}" ]; then + print_err_msg_exit "\ +The experiments list file (expts_file) specified as an argument to this +script (and with full path given by expts_list_fp) does not exist: + expts_file = \"${expts_file}\" + expts_list_fp = \"${expts_list_fp}\"" + fi +# +#----------------------------------------------------------------------- +# +# Read in the list of experiments (which might be baselines) to run. +# This entails reading in each line of the file expts_list.txt in the +# directory of this script and saving the result in the array variable +# expts_list. Note that each line of expts_list.txt has the form +# +# BASELINE_NAME | VAR_NAME_1="VAR_VALUE_1" | ... | VAR_NAME_N="VAR_VALUE_N" +# +# where BASELINE_NAME is the name of the baseline and the zero or more +# variable name-value pairs following the baseline name are a list of +# variables to modify from the baseline. 
Note that: +# +# 1) There must exist a experiment/workflow configuration file named +# config.BASELINE_NAME.sh in a subdirectory named baseline_configs +# in the directory of this script. +# +# 2) The variable name-value pairs on each line of the expts_list.txt +# file are delimited from the baseline and from each other by pipe +# characters (i.e. "|"). +# +#----------------------------------------------------------------------- +# +print_info_msg " +Reading in list of forecast experiments from file + expts_list_fp = \"${expts_list_fp}\" +and storing result in the array \"all_lines\" (one array element per expe- +riment)..." + +readarray -t all_lines < "${expts_list_fp}" + +all_lines_str=$( printf "\'%s\'\n" "${all_lines[@]}" ) +print_info_msg " +All lines from experiments list file (expts_list_fp) read in, where: + expts_list_fp = \"${expts_list_fp}\" +Contents of file are (line by line, each line within single quotes, and +before any processing): + +${all_lines_str} +" +# +#----------------------------------------------------------------------- +# +# Loop through the elements of all_lines and modify each line to remove +# leading and trailing whitespace and any whitespace before and after +# the field separator character (which is the pipe character, "|"). Al- +# so, drop any elements that are empty after this processing, and save +# the resulting set of non-empty elements in the array expts_list. +# +#----------------------------------------------------------------------- +# +expts_list=() +field_separator="\|" # Need backslash as an escape sequence in the sed commands below. + +j=0 +num_lines="${#all_lines[@]}" +for (( i=0; i<=$((num_lines-1)); i++ )); do +# +# Remove all leading and trailing whitespace from the current element of +# all_lines. +# + all_lines[$i]=$( printf "%s" "${all_lines[$i]}" | \ + sed -r -e "s/^[ ]*//" -e "s/[ ]*$//" ) +# +# Remove spaces before and after all field separators in the current +# element of all_lines. 
Note that we use the pipe symbol, "|", as the +# field separator. +# + all_lines[$i]=$( printf "%s" "${all_lines[$i]}" | \ + sed -r -e "s/[ ]*${field_separator}[ ]*/${field_separator}/g" ) +# +# If the last character of the current line is a field separator, remove +# it. +# + all_lines[$i]=$( printf "%s" "${all_lines[$i]}" | \ + sed -r -e "s/${field_separator}$//g" ) +# +# If after the processing above the current element of all_lines is not +# empty, save it as the next element of expts_list. +# + if [ ! -z "${all_lines[$i]}" ]; then + expts_list[$j]="${all_lines[$i]}" + j=$((j+1)) + fi + +done +# +#----------------------------------------------------------------------- +# +# Get the number of experiments to run and print out an informational +# message. +# +#----------------------------------------------------------------------- +# +num_expts="${#expts_list[@]}" +expts_list_str=$( printf " \'%s\'\n" "${expts_list[@]}" ) +print_info_msg " +After processing, the number of experiments to run (num_expts) is: + num_expts = ${num_expts} +The list of forecast experiments to run (one experiment per line) is gi- +ven by: +${expts_list_str} +" +# +#----------------------------------------------------------------------- +# +# Loop through the elements of the array expts_list. For each element +# (i.e. for each experiment), generate an experiment directory and cor- +# responding workflow and then launch the workflow. +# +#----------------------------------------------------------------------- +# +for (( i=0; i<=$((num_expts-1)); i++ )); do + + print_info_msg " +Processing experiment \"${expts_list[$i]}\" ..." +# +# Get the name of the baseline on which the current experiment is based. +# Then save the remainder of the current element of expts_list in the +# variable "remainder". Note that if this variable is empty, then the +# current experiment is identical to the current baseline. 
If not, then +# "remainder" contains the modifications that need to be made to the +# current baseline to obtain the current experiment. +# + regex_search="^([^\|]*)(\|(.*)|)" + baseline_name=$( printf "%s" "${expts_list[$i]}" | \ + sed -r -n -e "s/${regex_search}/\1/p" ) + remainder=$( printf "%s" "${expts_list[$i]}" | \ + sed -r -n -e "s/${regex_search}/\3/p" ) +# +# Get the names and corresponding values of the variables that need to +# be modified in the current baseline to obtain the current experiment. +# The following while-loop steps through all the variables listed in +# "remainder" +# + modvar_name=() + modvar_value=() + num_mod_vars=0 + while [ ! -z "${remainder}" ]; do +# +# Get the next variable-value pair in remainder, and save what is left +# of remainder back into itself. +# + next_field=$( printf "%s" "$remainder" | \ + sed -r -e "s/${regex_search}/\1/" ) + remainder=$( printf "%s" "$remainder" | \ + sed -r -e "s/${regex_search}/\3/" ) +# +# Save the name of the variable in the variable-value pair obtained +# above in the array modvar_name. Then save the value in the variable- +# value pair in the array modvar_value. +# + modvar_name[${num_mod_vars}]=$( printf "%s" "${next_field}" | \ + sed -r -e "s/^([^=]*)=(.*)/\1/" ) + modvar_value[${num_mod_vars}]=$( printf "%s" "${next_field}" | \ + sed -r -e "s/^([^=]*)=(\")?([^\"]+*)(\")?/\3/" ) +# +# Increment the index that keeps track of the number of variables that +# need to be modified in the current baseline to obtain the current ex- +# periment. +# + num_mod_vars=$((num_mod_vars+1)) + + done +# +# Generate the path to the configuration file for the current baseline. +# This will be modified to obtain the configuration file for the current +# experiment. +# + baseline_config_fp="${baseline_configs_dir}/config.${baseline_name}.sh" +# +# Print out an error message and exit if a configuration file for the +# current baseline does not exist. +# + if [ ! 
-f "${baseline_config_fp}" ]; then + print_err_msg_exit "\ +The experiment/workflow configuration file (baseline_config_fp) for the +specified baseline (baseline_name) does not exist: + baseline_name = \"${baseline_name}\" + baseline_config_fp = \"${baseline_config_fp}\" +Please correct and rerun." + fi +# +# We require that EXPT_SUBDIR in the configuration file for the baseline +# be set to the name of the baseline. Check for this by extracting the +# value of EXPT_SUBDIR from the baseline configuration file and compa- +# ring it to baseline_name. +# +if [ 0 = 1 ]; then + regex_search="^[ ]*EXPT_SUBDIR=(\")?([^ =\"]+)(.*)" + EXPT_SUBDIR=$( sed -r -n -e "s/${regex_search}/\2/p" \ + "${baseline_config_fp}" ) + if [ "${EXPT_SUBDIR}" != "${baseline_name}" ]; then + print_err_msg_exit "\ +The name of the experiment subdirectory (EXPT_SUBDIR) in the configura- +tion file (baseline_config_fp) for the current baseline does not match +the name of the baseline (baseline_name): + baseline_name = \"${baseline_name}\" + baseline_config_fp = \"${baseline_config_fp}\" + EXPT_SUBDIR = \"${EXPT_SUBDIR}\"" + fi +fi +# +# Generate a name for the current experiment. We start with the name of +# the current baseline and modify it to indicate which variables must be +# reset to obtain the current experiment. +# + expt_name="${baseline_name}" + for (( j=0; j<${num_mod_vars}; j++ )); do + if [ $j -lt ${#modvar_name[@]} ]; then + expt_name="${expt_name}__${modvar_name[$j]}.eq.${modvar_value[$j]}" + else + break + fi + done +# +# Set expt_subdir to the name of the current experiment. Below, we will +# write this to the configuration file for the current experiment. +# + expt_subdir="${expt_name}" +# +# Create a configuration file for the current experiment. We do this by +# first copying the baseline configuration file and then modifying the +# the values of those variables within it that are different between the +# baseline and the experiment. 
+# + expt_config_fp="$ushdir/config.${expt_name}.sh" + cp_vrfy "${baseline_config_fp}" "${expt_config_fp}" +# +#----------------------------------------------------------------------- +# +# Set the name of the experiment subdirectory (EXPT_SUBDIR) in the expe- +# riment configuration file to the name of the current experiment. +# +#----------------------------------------------------------------------- +# + set_bash_param "${expt_config_fp}" "EXPT_SUBDIR" "${expt_subdir}" +# +#----------------------------------------------------------------------- +# +# Set any parameters in the experiment configuration file that have been +# assigned a value in the arguments list to this script (and thus are +# not empty). Any parameters that have not been assigned a value in the +# arguments list will retain their values in the baseline configuration +# file if they are specified in that file. If not, they will take on +# the default values specified in the default experiment configuration +# file in the workflow repository (config_defaults.sh). +# +#----------------------------------------------------------------------- +# + if [ ! -z "$machine" ]; then + set_bash_param "${expt_config_fp}" "MACHINE" "$machine" + fi + + if [ ! -z "$account" ]; then + set_bash_param "${expt_config_fp}" "ACCOUNT" "$account" + fi + + if [ ! -z "${use_cron_to_relaunch}" ]; then + set_bash_param "${expt_config_fp}" "USE_CRON_TO_RELAUNCH" "${use_cron_to_relaunch}" + fi + + if [ ! -z "${cron_relaunch_intvl_mnts}" ]; then + set_bash_param "${expt_config_fp}" "CRON_RELAUNCH_INTVL_MNTS" "${cron_relaunch_intvl_mnts}" + fi +# +#----------------------------------------------------------------------- +# +# Set the values of those parameters in the experiment configuration +# file that need to be adjusted from their baseline values (as specified +# in the current line of the experiments list file) to obtain the confi- +# guration file for the current experiment. 
+# +#----------------------------------------------------------------------- +# + printf "" + for (( j=0; j<${num_mod_vars}; j++ )); do + set_bash_param "${expt_config_fp}" "${modvar_name[$j]}" "${modvar_value[$j]}" + done +# +# Create a symlink called "config.sh" in ushdir that points to the cur- +# rent experiment's configuration file. This must be done because the +# experiment/workflow generation script assumes that this is the name +# and location of the configuration file to use to generate a new expe- +# riment and corresponding workflow. +# +# ln_vrfy -fs "${expt_config_fp}" "$ushdir/config.sh" + mv_vrfy -f "${expt_config_fp}" "$ushdir/config.sh" +# +#----------------------------------------------------------------------- +# +# Call the experiment/workflow generation script to generate an experi- +# ment directory and rocoto workflow XML for the current experiment. +# +#----------------------------------------------------------------------- +# + $ushdir/generate_FV3SAR_wflow.sh || \ + print_err_msg_exit "\ +Could not generate an experiment/workflow for the test specified by +expt_name: + expt_name = \"${expt_name}\"" + +done +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# +{ restore_shell_opts; } > /dev/null 2>&1 + diff --git a/tests/run_mltpl_expts.sh b/tests/run_mltpl_expts.sh deleted file mode 100755 index 13113db978..0000000000 --- a/tests/run_mltpl_expts.sh +++ /dev/null @@ -1,136 +0,0 @@ -#!/bin/bash - -set -u -# -#----------------------------------------------------------------------- -# -# Define directories. -# -#----------------------------------------------------------------------- -# -BASEDIR="$(pwd)/../.." 
-FV3SAR_WFLOW_DIR="$BASEDIR/regional_workflow" -USHDIR="$FV3SAR_WFLOW_DIR/ush" -TESTSDIR="$FV3SAR_WFLOW_DIR/tests" -# -#----------------------------------------------------------------------- -# -# Source useful functions (sort of like a library). -# -#----------------------------------------------------------------------- -# -. $USHDIR/source_funcs.sh -# -#----------------------------------------------------------------------- -# -# Check the number of arguments. -# -#----------------------------------------------------------------------- -# -if [ "$#" -ne 1 ]; then - - print_err_msg_exit "\ -Script \"$0\": -Incorrect number of arguments specified. Usage: - - $0 \${test_suite} - -where \${test_suite} is the name of the test suite to run. Each test -suite consists of one or more sets of FV3SAR experiment parameter val- -ues. The values that each parameter will take on in a given test suite -must be specified in a file named \"param_arrays.\${test_suite}.sh\" in the -same directory as this script." - -fi -# -#----------------------------------------------------------------------- -# -# Set the name of the test suite. Then set the name of the file that -# specifies the values that each parameter will take on and check that -# that file exists. -# -#----------------------------------------------------------------------- -# -test_suite=${1:-""} - -PARAM_ARRAYS_FN="param_arrays.${test_suite}.sh" -PARAM_ARRAYS_FP="$TESTSDIR/$PARAM_ARRAYS_FN" - -if [ ! -f ${PARAM_ARRAYS_FP} ]; then - print_err_msg_exit "\ -Script \"$0\": -The file specified by PARAM_ARRAYS_FP defining the arrays that specify -the values that each experiment parameter will take on does not exist: - PARAM_ARRAYS_FP = \"$PARAM_ARRAYS_FP\" -" -else - . ${PARAM_ARRAYS_FP} -fi -# -#----------------------------------------------------------------------- -# -# Print out information about the test suite to be run. 
-# -#----------------------------------------------------------------------- -# -all_vals_predef_domain_str=$(printf "\"%s\" " "${all_vals_predef_domain[@]}") -all_vals_grid_gen_method_str=$(printf "\"%s\" " "${all_vals_grid_gen_method[@]}") -all_vals_CCPP_str=$(printf "\"%s\" " "${all_vals_CCPP[@]}") -all_vals_phys_suite_str=$(printf "\"%s\" " "${all_vals_phys_suite[@]}") -all_vals_CDATE_str=$(printf "\"%s\" " "${all_vals_CDATE[@]}") -all_vals_fcst_len_hrs_str=$(printf "\"%s\" " "${all_vals_fcst_len_hrs[@]}") -all_vals_quilting_str=$(printf "\"%s\" " "${all_vals_quilting[@]}") - -print_info_msg "\ -Creating and launching workflows for test suite: - - test_suite = \"$test_suite\" - -The values that each experiment parameter will take on are: - - all_vals_predef_domain = ( $all_vals_predef_domain_str ) - all_vals_grid_gen_method = ( $all_vals_grid_gen_method_str ) - all_vals_CCPP = ( $all_vals_CCPP_str ) - all_vals_phys_suite = ( $all_vals_phys_suite_str ) - all_vals_CDATE = ( $all_vals_CDATE_str ) - all_vals_fcst_len_hrs = ( $all_vals_fcst_len_hrs_str ) - all_vals_quilting = ( $all_vals_quilting_str )" -# -#----------------------------------------------------------------------- -# -# Loop through all possible combinations of the specified parameter val- -# ues and create and run a workflow for each combination. -# -#----------------------------------------------------------------------- -# -for predef_domain in "${all_vals_predef_domain[@]}"; do - for grid_gen_method in "${all_vals_grid_gen_method[@]}"; do - for CCPP in "${all_vals_CCPP[@]}"; do - for phys_suite in "${all_vals_phys_suite[@]}"; do - for CDATE in "${all_vals_CDATE[@]}"; do - for quilting in "${all_vals_quilting[@]}"; do - for fcst_len_hrs in "${all_vals_fcst_len_hrs[@]}"; do -# -# In the call to the run_one_expt.sh script below, we place each varia- -# ble being passed in as an argument in double quotes. 
This ensures -# that empty variables are still recognized as arguments by the script -# (instead of being skipped over). -# - ./run_one_expt.sh \ - "$predef_domain" \ - "$grid_gen_method" \ - "$CCPP" \ - "$phys_suite" \ - "$CDATE" \ - "$fcst_len_hrs" \ - "$quilting" - - done - done - done - done - done - done -done - - diff --git a/regional/NCL_ICs_BCs/.gitignore b/ush/NCL/NCL_ICs_BCs/.gitignore similarity index 100% rename from regional/NCL_ICs_BCs/.gitignore rename to ush/NCL/NCL_ICs_BCs/.gitignore diff --git a/regional/NCL_ICs_BCs/adjust_longitude_range.ncl b/ush/NCL/NCL_ICs_BCs/adjust_longitude_range.ncl similarity index 100% rename from regional/NCL_ICs_BCs/adjust_longitude_range.ncl rename to ush/NCL/NCL_ICs_BCs/adjust_longitude_range.ncl diff --git a/regional/NCL_ICs_BCs/calc_field_stats.ncl b/ush/NCL/NCL_ICs_BCs/calc_field_stats.ncl similarity index 100% rename from regional/NCL_ICs_BCs/calc_field_stats.ncl rename to ush/NCL/NCL_ICs_BCs/calc_field_stats.ncl diff --git a/regional/NCL_ICs_BCs/generate_ICs_BCs.sh b/ush/NCL/NCL_ICs_BCs/generate_ICs_BCs.sh similarity index 100% rename from regional/NCL_ICs_BCs/generate_ICs_BCs.sh rename to ush/NCL/NCL_ICs_BCs/generate_ICs_BCs.sh diff --git a/regional/NCL_ICs_BCs/generate_RAP_based_ICs_BCs.ncl b/ush/NCL/NCL_ICs_BCs/generate_RAP_based_ICs_BCs.ncl similarity index 100% rename from regional/NCL_ICs_BCs/generate_RAP_based_ICs_BCs.ncl rename to ush/NCL/NCL_ICs_BCs/generate_RAP_based_ICs_BCs.ncl diff --git a/regional/NCL_ICs_BCs/get_rect_grid_bdy.ncl b/ush/NCL/NCL_ICs_BCs/get_rect_grid_bdy.ncl similarity index 100% rename from regional/NCL_ICs_BCs/get_rect_grid_bdy.ncl rename to ush/NCL/NCL_ICs_BCs/get_rect_grid_bdy.ncl diff --git a/regional/NCL_ICs_BCs/get_resized_viewport_dims.ncl b/ush/NCL/NCL_ICs_BCs/get_resized_viewport_dims.ncl similarity index 100% rename from regional/NCL_ICs_BCs/get_resized_viewport_dims.ncl rename to ush/NCL/NCL_ICs_BCs/get_resized_viewport_dims.ncl diff --git 
a/regional/NCL_ICs_BCs/interpolate_RAP_to_SARFV3.ncl b/ush/NCL/NCL_ICs_BCs/interpolate_RAP_to_SARFV3.ncl similarity index 100% rename from regional/NCL_ICs_BCs/interpolate_RAP_to_SARFV3.ncl rename to ush/NCL/NCL_ICs_BCs/interpolate_RAP_to_SARFV3.ncl diff --git a/regional/NCL_ICs_BCs/pause.ncl b/ush/NCL/NCL_ICs_BCs/pause.ncl similarity index 100% rename from regional/NCL_ICs_BCs/pause.ncl rename to ush/NCL/NCL_ICs_BCs/pause.ncl diff --git a/regional/NCL_ICs_BCs/plot_horiz_field.ncl b/ush/NCL/NCL_ICs_BCs/plot_horiz_field.ncl similarity index 100% rename from regional/NCL_ICs_BCs/plot_horiz_field.ncl rename to ush/NCL/NCL_ICs_BCs/plot_horiz_field.ncl diff --git a/regional/NCL_ICs_BCs/plot_model_field_generic.ncl b/ush/NCL/NCL_ICs_BCs/plot_model_field_generic.ncl similarity index 100% rename from regional/NCL_ICs_BCs/plot_model_field_generic.ncl rename to ush/NCL/NCL_ICs_BCs/plot_model_field_generic.ncl diff --git a/regional/NCL_ICs_BCs/read_FV3SAR_grid_halo.ncl b/ush/NCL/NCL_ICs_BCs/read_FV3SAR_grid_halo.ncl similarity index 100% rename from regional/NCL_ICs_BCs/read_FV3SAR_grid_halo.ncl rename to ush/NCL/NCL_ICs_BCs/read_FV3SAR_grid_halo.ncl diff --git a/regional/NCL_ICs_BCs/read_RAP_grid.ncl b/ush/NCL/NCL_ICs_BCs/read_RAP_grid.ncl similarity index 100% rename from regional/NCL_ICs_BCs/read_RAP_grid.ncl rename to ush/NCL/NCL_ICs_BCs/read_RAP_grid.ncl diff --git a/regional/NCL_ICs_BCs/read_RAP_horiz_field.ncl b/ush/NCL/NCL_ICs_BCs/read_RAP_horiz_field.ncl similarity index 100% rename from regional/NCL_ICs_BCs/read_RAP_horiz_field.ncl rename to ush/NCL/NCL_ICs_BCs/read_RAP_horiz_field.ncl diff --git a/regional/NCL_ICs_BCs/set_cnLevels_lbLabels.ncl b/ush/NCL/NCL_ICs_BCs/set_cnLevels_lbLabels.ncl similarity index 100% rename from regional/NCL_ICs_BCs/set_cnLevels_lbLabels.ncl rename to ush/NCL/NCL_ICs_BCs/set_cnLevels_lbLabels.ncl diff --git a/regional/NCL_ICs_BCs/set_plot_header.ncl b/ush/NCL/NCL_ICs_BCs/set_plot_header.ncl similarity index 100% rename from 
regional/NCL_ICs_BCs/set_plot_header.ncl rename to ush/NCL/NCL_ICs_BCs/set_plot_header.ncl diff --git a/regional/NCL_ICs_BCs/special_chars.ncl b/ush/NCL/NCL_ICs_BCs/special_chars.ncl similarity index 100% rename from regional/NCL_ICs_BCs/special_chars.ncl rename to ush/NCL/NCL_ICs_BCs/special_chars.ncl diff --git a/regional/NCL_ICs_BCs/strcmp.ncl b/ush/NCL/NCL_ICs_BCs/strcmp.ncl similarity index 100% rename from regional/NCL_ICs_BCs/strcmp.ncl rename to ush/NCL/NCL_ICs_BCs/strcmp.ncl diff --git a/ush/NCL/calc_wrtcmp_grid_params.ncl b/ush/NCL/calc_wrtcmp_grid_params.ncl new file mode 100644 index 0000000000..d24627e0c2 --- /dev/null +++ b/ush/NCL/calc_wrtcmp_grid_params.ncl @@ -0,0 +1,487 @@ +; +; ********************************************************************** +; +; Load files. +; +; ********************************************************************** +; +loadscript(lib_location + "pause.ncl") +loadscript(lib_location + "constants.ncl") +loadscript(lib_location + "strcmp_exact.ncl") +loadscript(lib_location + "strpad.ncl") +loadscript(lib_location + "repeat_str.ncl") +loadscript(lib_location + "convert_from_to_sphr_coords_to_from_rotated_sphr.ncl") +loadscript(lib_location + "convert_sphr_coords_to_lambert_cnfrml.ncl") +loadscript(lib_location + "convert_lambert_cnfrml_coords_to_sphr.ncl") + +undef("calc_wrtcmp_grid_params") + +function calc_wrtcmp_grid_params( \ + wrtcmp_config_fn:string, \ + lon_ctr_native:snumeric, lat_ctr_native:snumeric, \ + lon_tile_corners_face_midpts_native[8]:snumeric, \ + lat_tile_corners_face_midpts_native[8]:snumeric, \ + dx_native:snumeric, dy_native:snumeric, \ + angle_units:string) + +local lon_ctr, lat_ctr, lat1, lat2, \ + x_SW_native, y_SW_native, \ + num_gap_cells, \ + dx, x_SW, x_NE, Lx, Lx_ovr_dx, nx, frac_x, x_increment, \ + dy, y_SW, y_NE, Ly, Ly_ovr_dy, ny, frac_y, y_increment, \ + sphr_coords, lon_SW, lat_SW, \ + param_names, num_params, param_names_and_vals, trailing_comments, \ + np, param_name, param_value, 
param_value_str, \ + regex_search, regex_print, sed_cmd, \ + fmt_str, msg, \ + str_lens, str_len_max, num_delimit_spaces, delimit_spaces, \ + lines_final, regex_replace, sed_output, \ + out + +begin +; +; ********************************************************************** +; +; If not already defined, define the string (separator_line) that serves +; as a separator line between different sections of printout. +; +; ********************************************************************** +; + if (.not. isvar("separator_line")) then + separator_line := repeat_str("=", 72) + end if +; +; ********************************************************************** +; +; Set the coordinates of the center of the write-component output grid +; to be equal to those of the native grid. Also, set the latitudes de- +; fining the two standard parallels of the Lambert conformal projection +; used by the output grid to the latitude of the output grid center. +; +; ********************************************************************** +; +; lon_ctr = lon_ctr_native +; lat_ctr = lat_ctr_native +; lat1 = lat_ctr +; lat2 = lat_ctr +; +; ********************************************************************** +; +; Calculate the Lambert coordinates of the southwest corner of the na- +; tive grid from its spherical coordinates. 
+; +; ********************************************************************** +; + if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then + + lon_ctr = lon_ctr_native + lat_ctr = lat_ctr_native + + rotated_sphr_coords \ + := convert_from_to_sphr_coords_to_from_rotated_sphr( \ + lon_ctr, lat_ctr, angle_units, 1, \ + lon_tile_corners_face_midpts_native, \ + lat_tile_corners_face_midpts_native) + lon_verts = sphr_coords@lon_out + lat_verts = sphr_coords@lat_out + x_tile_corners_face_midpts_native = rotated_sphr_coords@lon_out + y_tile_corners_face_midpts_native = rotated_sphr_coords@lat_out + + else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then + + lon_ctr = lon_ctr_native + lat_ctr = lat_ctr_native + lat1 = lat_ctr + lat2 = lat_ctr + + lambert_coords \ + := convert_sphr_coords_to_lambert_cnfrml( \ + lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ + lon_tile_corners_face_midpts_native, \ + lat_tile_corners_face_midpts_native) + x_tile_corners_face_midpts_native = lambert_coords@x + y_tile_corners_face_midpts_native = lambert_coords@y + + end if + end if + + i = 0 + x_SW_native = x_tile_corners_face_midpts_native(i) + y_SW_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_S_native = x_tile_corners_face_midpts_native(i) + y_S_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_SE_native = x_tile_corners_face_midpts_native(i) + y_SE_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_E_native = x_tile_corners_face_midpts_native(i) + y_E_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_NE_native = x_tile_corners_face_midpts_native(i) + y_NE_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_N_native = x_tile_corners_face_midpts_native(i) + y_N_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_NW_native = x_tile_corners_face_midpts_native(i) + y_NW_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_W_native = x_tile_corners_face_midpts_native(i) + 
y_W_native = y_tile_corners_face_midpts_native(i) +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + dx = dx_native + dy = dy_native +; num_margin_cells = 1 + num_margin_cells = 5 +; num_margin_cells = 100 +; +; ********************************************************************** +; +; Reduce the extent of the write-component grid in both the positive and +; negative x directions until the latitude of the center of the west +; face of the write-component grid is greater than that of the native +; grid, and the latitude of the center of the east face of the write- +; component grid is less than that of the native grid (i.e. the write- +; component grid lies within the native grid in the x direction). Then, +; as an extra safety measure, reduce each of these extents by a further +; nc_reduce_extra_max cells of size dx. +; +; ********************************************************************** +; + x_W_native_max = max((/x_SW_native, x_W_native, x_NW_native/)) + x_E_native_min = min((/x_SE_native, x_E_native, x_NE_native/)) + + x_W = x_W_native_max + num_margin_cells*dx + x_E = x_E_native_min - num_margin_cells*dx + + Lx = x_E - x_W + Lx_ovr_dx = Lx/dx + nx = tointeger(Lx_ovr_dx) + frac_x = Lx_ovr_dx - nx + x_adj = (0.5d+0*frac_x)*dx + x_W = x_W + x_adj + x_E = x_E - x_adj +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + y_S_native_max = max((/y_SW_native, y_S_native, y_SE_native/)) + y_N_native_min = min((/y_NW_native, y_N_native, y_NE_native/)) + + y_S = y_S_native_max + num_margin_cells*dy + y_N = y_N_native_min - num_margin_cells*dy + + y_S_tmp = y_S + y_N_tmp = y_N + y_S = -min(abs((/y_S_tmp, y_N_tmp/))) + y_N = -y_S + + Ly = y_N - y_S + Ly_ovr_dy = Ly/dy + ny = tointeger(Ly_ovr_dy) + frac_y = Ly_ovr_dy - ny + y_adj = 
(0.5d+0*frac_y)*dy + y_S = y_S + y_adj + y_N = y_N - y_adj +; +; ********************************************************************** +; +; Calculate the spherical coordinates of the southwest corner of the na- +; tive grid from its output-grid (rotated lat-lon or Lambert) coordinates. +; +; ********************************************************************** +; + x_W = x_W + 0.5*dx + x_E = x_E - 0.5*dx + + y_S = y_S + 0.5*dy + y_N = y_N - 0.5*dy + + if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then + sphr_coords \ + := convert_from_to_sphr_coords_to_from_rotated_sphr( \ + lon_ctr, lat_ctr, angle_units, -1, \ + x_W, y_S) + lon_SW = sphr_coords@lon_out + lat_SW = sphr_coords@lat_out + else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then + sphr_coords \ + := convert_lambert_cnfrml_coords_to_sphr( \ + lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ + x_W, y_S) + lon_SW = sphr_coords@lon + lat_SW = sphr_coords@lat + end if + end if +; +; ********************************************************************** +; +; Create a string array containing the names of the write-component +; output grid parameters that appear in the NEMS model_configure file. +; +; ********************************************************************** +; + if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then + + param_names = (/ \ + "output_grid", \ + "cen_lon", \ + "cen_lat", \ + "lon1", \ + "lat1", \ + "lon2", \ + "lat2", \ + "dlon", \ + "dlat" /) + + else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then + + param_names = (/ \ + "output_grid", \ + "cen_lon", \ + "cen_lat", \ + "stdlat1", \ + "stdlat2", \ + "nx", \ + "ny", \ + "lon1", \ + "lat1", \ + "dx", \ + "dy" /) + + end if + end if +; +; ********************************************************************** +; +; Get the number of write-component output grid parameters that need +; to be set in the NEMS model_configure file. Then initialize string +; arrays needed in setting these parameters. 
+; +; ********************************************************************** +; + num_params = dimsizes(param_names) + param_names_and_vals := new(num_params, "string") + trailing_comments := new(num_params, "string") +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + np = 0 + param_name = param_names(np) +; param_value := char_sq + "lambert_conformal" + char_sq + param_value := char_sq + wrtcmp_coord_sys + char_sq + param_value_str := tostring(param_value) + + regex_search = "^(\s*" + param_name + ":\s+)(<" + param_name + ">)(\s*)(.*)" + + regex_print = "\1" + param_value_str + sed_cmd = "sed -n -r -e " + char_dq + "s|" + regex_search + "|" \ + + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn + param_names_and_vals(np) = systemfunc(sed_cmd) + + regex_print = "\4" + sed_cmd = "sed -n -r -e " + char_dq + "s|" + regex_search + "|" \ + + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn + trailing_comments(np) = systemfunc(sed_cmd) +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + do np=1, num_params-1 + + param_name := param_names(np) + unrecognized_param = False + + if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then + + if (strcmp_exact(param_name, "cen_lon")) then + param_value := lon_ctr + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "cen_lat")) then + param_value := lat_ctr + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "lon1")) then + param_value := rot_lon_SW + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "lat1")) then + param_value := rot_lat_SW + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "lon2")) then + param_value := rot_lon_NE + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "lat2")) then + param_value := rot_lat_NE + fmt_str = "%16.8f" + else if 
(strcmp_exact(param_name, "dlon")) then + param_value := dlon + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "dlat")) then + param_value := dlat + fmt_str = "%16.8f" + else + unrecognized_param = True + end if + end if + end if + end if + end if + end if + end if + end if + + else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then + + if (strcmp_exact(param_name, "cen_lon")) then + param_value := lon_ctr + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "cen_lat")) then + param_value := lat_ctr + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "stdlat1")) then + param_value := lat1 + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "stdlat2")) then + param_value := lat2 + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "nx")) then + param_value := nx + fmt_str = "%10.0f" + else if (strcmp_exact(param_name, "ny")) then + param_value := ny + fmt_str = "%10.0f" + else if (strcmp_exact(param_name, "lon1")) then + param_value := lon_SW + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "lat1")) then + param_value := lat_SW + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "dx")) then + param_value := dx + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "dy")) then + param_value := dy + fmt_str = "%16.8f" + else + unrecognized_param = True + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + + end if + end if +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + if (unrecognized_param) then + msg := char_nl + \ +"Unknown parameter name specified for given output_coord_sys:" + char_nl + \ +" output_coord_sys = " + char_dq + output_coord_sys + char_dq + char_nl + \ +" param_name = " + char_dq + param_name + char_dq + char_nl + \ +"Stopping." 
+ print("" + msg) + exit + end if +; +; Generate a string containing the parameter value and formatted as spe- +; cified by fmt_str. Then strip any leading and trailing whitespace +; from it. +; + param_value_str := sprintf(fmt_str, param_value) + param_value_str := str_strip(param_value_str) +; +; Set the regular expression to search for. +; + regex_search = "^(\s*" + param_name + ":\s+)(<" + param_name + ">)(\s*)(.*)" +; +; Get the parameter name and value without the trailing comment (if any). +; + regex_print = "\1" + param_value_str + sed_cmd = "sed -n -r -e " + char_dq + "s|" + regex_search + "|" \ + + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn + param_names_and_vals(np) = systemfunc(sed_cmd) +; +; Get the trailing name and comment. +; + regex_print = "\4" + sed_cmd = "sed -n -r -e " + char_dq + "s|" + regex_search + "|" \ + + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn + trailing_comments(np) = systemfunc(sed_cmd) + + end do +; +; ********************************************************************** +; +; Generate a string array containing each line in the model_configure +; file that specifies a parameter describing the write-component output +; grid. Each such line will contain the parameter name, value, and any +; trailing comments, with the trailing comments aligned for readability. +; +; ********************************************************************** +; + lines_final := strpad(param_names_and_vals, " ", "right") + lines_final := lines_final + " " + trailing_comments +; +; ********************************************************************** +; +; Loop through the set of parameters and find the line in the template +; file where each is set. Then replace that line with the corresponding +; line generated above containing the parameter name, its value, and the +; optional aligned comment. 
+; +; ********************************************************************** +; + do np=0, num_params-1 + param_name = param_names(np) + regex_search = "^(\s*" + param_name + ":\s+)(<" + param_name + ">)(\s*)(.*)" + regex_replace = lines_final(np) + sed_cmd = "sed -i -r -e " + char_dq + "s|" + regex_search + "|" \ + + regex_replace + "|" + char_dq + " " + wrtcmp_config_fn + sed_output = systemfunc(sed_cmd) + end do +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + out = True + return(out) + +end + diff --git a/ush/NCL/calc_wrtcmp_grid_params_lambert_cnfrml.ncl b/ush/NCL/calc_wrtcmp_grid_params_lambert_cnfrml.ncl index fdfdd94d19..d747f33d04 100644 --- a/ush/NCL/calc_wrtcmp_grid_params_lambert_cnfrml.ncl +++ b/ush/NCL/calc_wrtcmp_grid_params_lambert_cnfrml.ncl @@ -10,8 +10,8 @@ loadscript(lib_location + "constants.ncl") loadscript(lib_location + "strcmp_exact.ncl") loadscript(lib_location + "strpad.ncl") loadscript(lib_location + "repeat_str.ncl") -loadscript(lib_location + "calc_lambert_cnfrml_coords_from_sphr.ncl") -loadscript(lib_location + "calc_sphr_coords_from_lambert_cnfrml.ncl") +loadscript(lib_location + "convert_sphr_coords_to_lambert_cnfrml.ncl") +loadscript(lib_location + "convert_lambert_cnfrml_coords_to_sphr.ncl") undef("calc_wrtcmp_grid_params_lambert_cnfrml") @@ -72,7 +72,7 @@ begin ; ********************************************************************** ; lambert_coords \ - := calc_lambert_cnfrml_coords_from_sphr( \ + := convert_sphr_coords_to_lambert_cnfrml( \ lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ lon_tile_corners_face_midpts_native, \ lat_tile_corners_face_midpts_native) @@ -190,7 +190,7 @@ begin y_N = y_N - 0.5*dy sphr_coords \ - := calc_sphr_coords_from_lambert_cnfrml( \ + := convert_lambert_cnfrml_coords_to_sphr( \ lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ x_W, y_S) lon_SW = sphr_coords@lon @@ 
-274,11 +274,9 @@ begin param_value := lat2 fmt_str = "%16.8f" else if (strcmp_exact(param_name, "nx")) then -; param_value := nxm param_value := nx fmt_str = "%10.0f" else if (strcmp_exact(param_name, "ny")) then -; param_value := nym param_value := ny fmt_str = "%10.0f" else if (strcmp_exact(param_name, "lon1")) then @@ -296,7 +294,7 @@ begin else msg := char_nl + \ -"Unknow parameter name specified for given output_coord_sys:" + char_nl + \ +"Unknown parameter name specified for given output_coord_sys:" + char_nl + \ " output_coord_sys = " + char_dq + output_coord_sys + char_dq + char_nl + \ " param_name = " + char_dq + param_name + char_dq + char_nl + \ "Stopping." @@ -355,6 +353,17 @@ begin ; ; ********************************************************************** ; +; Print out the write-component parameter values calculated above. +; +; ********************************************************************** +; + msg := char_nl + \ +"Write-component parameters corresponding to this native grid are:" + char_nl + print("" + msg) + print("" + lines_final) +; +; ********************************************************************** +; ; Loop through the set of parameters and find the line in the template ; file where each is set. Then replace that line with the corresponding ; line generated above containing the parameter name, its value, and the diff --git a/ush/NCL/calc_wrtcmp_grid_params_rotated_latlon.ncl b/ush/NCL/calc_wrtcmp_grid_params_rotated_latlon.ncl new file mode 100644 index 0000000000..4038b69ca5 --- /dev/null +++ b/ush/NCL/calc_wrtcmp_grid_params_rotated_latlon.ncl @@ -0,0 +1,382 @@ +; +; ********************************************************************** +; +; Load files. 
+; +; ********************************************************************** +; +loadscript(lib_location + "pause.ncl") +loadscript(lib_location + "constants.ncl") +loadscript(lib_location + "strcmp_exact.ncl") +loadscript(lib_location + "strpad.ncl") +loadscript(lib_location + "repeat_str.ncl") +loadscript(lib_location + "calc_rotated_sphr_coords_from_sphr.ncl") +loadscript(lib_location + "calc_sphr_coords_from_rotated_sphr.ncl") + +undef("calc_wrtcmp_grid_params_rotated_latlon") + +function calc_wrtcmp_grid_params_rotated_latlon( \ + wrtcmp_config_fn:string, \ + lon_ctr_native:snumeric, lat_ctr_native:snumeric, \ + lon_tile_corners_face_midpts_native[8]:snumeric, \ + lat_tile_corners_face_midpts_native[8]:snumeric, \ + dx_native:snumeric, dy_native:snumeric, \ + angle_units:string) + +local lon_ctr, lat_ctr, lat1, lat2, \ + x_SW_native, y_SW_native, \ + num_gap_cells, \ + dx, x_SW, x_NE, Lx, Lx_ovr_dx, nx, frac_x, x_increment, \ + dy, y_SW, y_NE, Ly, Ly_ovr_dy, ny, frac_y, y_increment, \ + sphr_coords, lon_SW, lat_SW, \ + param_names, num_params, param_names_and_vals, trailing_comments, \ + np, param_name, param_value, param_value_str, \ + regex_search, regex_print, sed_cmd, \ + fmt_str, msg, \ + str_lens, str_len_max, num_delimit_spaces, delimit_spaces, \ + lines_final, regex_replace, sed_output, \ + out + +begin +; +; ********************************************************************** +; +; If not already defined, define the string (separator_line) that serves +; as a separator line between different sections of printout. +; +; ********************************************************************** +; + if (.not. isvar("separator_line")) then + separator_line := repeat_str("=", 72) + end if +; +; ********************************************************************** +; +; Set the coordinates of the center of the write-component output grid +; to be equal to those of the native grid. 
Also, set the latitudes de- +; fining the two standard parallels of the Lambert conformal projection +; used by the output grid to the latitude of the output grid center. +; +; ********************************************************************** +; + lon_ctr = lon_ctr_native + lat_ctr = lat_ctr_native + lat1 = lat_ctr + lat2 = lat_ctr +; +; ********************************************************************** +; +; Calculate the Lambert coordinates of the southwest corner of the na- +; tive grid from its spherical coordinates. +; +; ********************************************************************** +; + lambert_coords \ + := calc_rotated_sphr_coords_from_sphr( \ + lon_ctr, lat_ctr, lat1, lat2, angle_units, \ + lon_tile_corners_face_midpts_native, \ + lat_tile_corners_face_midpts_native) + x_tile_corners_face_midpts_native = lambert_coords@x + y_tile_corners_face_midpts_native = lambert_coords@y + + i = 0 + x_SW_native = x_tile_corners_face_midpts_native(i) + y_SW_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_S_native = x_tile_corners_face_midpts_native(i) + y_S_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_SE_native = x_tile_corners_face_midpts_native(i) + y_SE_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_E_native = x_tile_corners_face_midpts_native(i) + y_E_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_NE_native = x_tile_corners_face_midpts_native(i) + y_NE_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_N_native = x_tile_corners_face_midpts_native(i) + y_N_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_NW_native = x_tile_corners_face_midpts_native(i) + y_NW_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_W_native = x_tile_corners_face_midpts_native(i) + y_W_native = y_tile_corners_face_midpts_native(i) +; +; ********************************************************************** +; +; +; +; 
********************************************************************** +; + dx = dx_native + dy = dy_native +; num_margin_cells = 1 + num_margin_cells = 5 +; num_margin_cells = 100 +; +; ********************************************************************** +; +; Reduce the extent of the write-component grid in both the positive and +; negative x directions until the latitude of the center of the west +; face of the write-component grid is greater than that of the native +; grid, and the latitude of the center of the east face of the write- +; component grid is less than that of the native grid (i.e. the write- +; component grid lies within the native grid in the x direction). Then, +; as an extra safety measure, reduce each of these extents by a further +; nc_reduce_extra_max cells of size dx. +; +; ********************************************************************** +; + x_W_native_max = max((/x_SW_native, x_W_native, x_NW_native/)) + x_E_native_min = min((/x_SE_native, x_E_native, x_NE_native/)) + + x_W = x_W_native_max + num_margin_cells*dx + x_E = x_E_native_min - num_margin_cells*dx + + Lx = x_E - x_W + Lx_ovr_dx = Lx/dx + nx = tointeger(Lx_ovr_dx) + frac_x = Lx_ovr_dx - nx + x_adj = (0.5d+0*frac_x)*dx + x_W = x_W + x_adj + x_E = x_E - x_adj +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + y_S_native_max = max((/y_SW_native, y_S_native, y_SE_native/)) + y_N_native_min = min((/y_NW_native, y_N_native, y_NE_native/)) + + y_S = y_S_native_max + num_margin_cells*dy + y_N = y_N_native_min - num_margin_cells*dy + + y_S_tmp = y_S + y_N_tmp = y_N + y_S = -min(abs((/y_S_tmp, y_N_tmp/))) + y_N = -y_S + + Ly = y_N - y_S + Ly_ovr_dy = Ly/dy + ny = tointeger(Ly_ovr_dy) + frac_y = Ly_ovr_dy - ny + y_adj = (0.5d+0*frac_y)*dy + y_S = y_S + y_adj + y_N = y_N - y_adj +; +; ********************************************************************** +; +; Calculate 
the spherical coordinates of the southwest corner of the na- +; tive grid from its rotated spherical coordinates. +; +; ********************************************************************** +; + x_W = x_W + 0.5*dx + x_E = x_E - 0.5*dx + + y_S = y_S + 0.5*dy + y_N = y_N - 0.5*dy + + sphr_coords \ + := calc_sphr_coords_from_rotated_sphr( \ + lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ + x_W, y_S) + lon_SW = sphr_coords@lon + lat_SW = sphr_coords@lat +; +; ********************************************************************** +; +; Create a string array containing the names of the rotated lat-lon +; output grid parameters that appear in the NEMS model_configure file. +; +; ********************************************************************** +; + param_names = (/ \ + "output_grid", \ + "cen_lon", \ + "cen_lat", \ + "lon1", \ + "lat1", \ + "lon2", \ + "lat2", \ + "dlon", \ + "dlat" /) +; +; ********************************************************************** +; +; Get the number of rotated lat-lon output grid parameters that need +; to be set in the NEMS model_configure file. Then initialize string +; arrays needed in setting these parameters. 
+; +; ********************************************************************** +; + num_params = dimsizes(param_names) + param_names_and_vals := new(num_params, "string") + trailing_comments := new(num_params, "string") +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + np = 0 + param_name = param_names(np) + param_value := char_sq + "rotated_latlon" + char_sq + param_value_str := tostring(param_value) + + regex_search = "^(\s*" + param_name + ":\s+)(<" + param_name + ">)(\s*)(.*)" + + regex_print = "\1" + param_value_str + sed_cmd = "sed -n -r -e " + char_dq + "s|" + regex_search + "|" \ + + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn + param_names_and_vals(np) = systemfunc(sed_cmd) + + regex_print = "\4" + sed_cmd = "sed -n -r -e " + char_dq + "s|" + regex_search + "|" \ + + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn + trailing_comments(np) = systemfunc(sed_cmd) +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + do np=1, num_params-1 + + param_name := param_names(np) + + if (strcmp_exact(param_name, "cen_lon")) then + param_value := lon_ctr + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "cen_lat")) then + param_value := lat_ctr + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "lon1")) then + param_value := lon_SW + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "lat1")) then + param_value := lat_SW + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "lon2")) then + param_value := lon_SW + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "lat2")) then + param_value := lat_SW + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "dlon")) then + param_value := dx + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "dlat")) then + param_value := dy + fmt_str = "%16.8f" + else + + 
msg := char_nl + \
+"Unknown parameter name specified for given output_coord_sys:" + char_nl + \
+"  output_coord_sys = " + char_dq + output_coord_sys + char_dq + char_nl + \
+"  param_name = " + char_dq + param_name + char_dq + char_nl + \
+"Stopping."
+        print("" + msg)
+        exit
+
+      end if
+      end if
+      end if
+      end if
+      end if
+      end if
+      end if
+      end if
+;
+; Generate a string containing the parameter value and formatted as spe-
+; cified by fmt_str.  Then strip any leading and trailing whitespace
+; from it.
+;
+    param_value_str := sprintf(fmt_str, param_value)
+    param_value_str := str_strip(param_value_str)
+;
+; Set the regular expression to search for.
+;
+    regex_search = "^(\s*" + param_name + ":\s+)(<" + param_name + ">)(\s*)(.*)"
+;
+; Get the parameter name and value without the trailing comment (if any).
+;
+    regex_print = "\1" + param_value_str
+    sed_cmd = "sed -n -r -e " + char_dq + "s|" + regex_search + "|" \
+            + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn
+    param_names_and_vals(np) = systemfunc(sed_cmd)
+;
+; Get the trailing name and comment.
+;
+    regex_print = "\4"
+    sed_cmd = "sed -n -r -e " + char_dq + "s|" + regex_search + "|" \
+            + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn
+    trailing_comments(np) = systemfunc(sed_cmd)
+
+  end do
+;
+; **********************************************************************
+;
+; Generate a string array containing each line in the model_configure
+; file that specifies a parameter describing the write-component output
+; grid.  Each such line will contain the parameter name, value, and a
+; trailing comment, with the trailing comments aligned for readability.
+;
+; **********************************************************************
+;
+  lines_final := strpad(param_names_and_vals, " ", "right")
+  lines_final := lines_final + "  " + trailing_comments
+;
+; **********************************************************************
+;
+; Print out the write-component parameter values calculated above.
+; +; ********************************************************************** +; + msg := char_nl + \ +"Write-component parameters corresponding to this native grid are:" + char_nl + print("" + msg) + print("" + lines_final) +; +; ********************************************************************** +; +; Loop through the set of parameters and find the line in the template +; file where each is set. Then replace that line with the corresponding +; line generated above containing the parameter name, its value, and the +; optional aligned comment. +; +; ********************************************************************** +; + do np=0, num_params-1 + param_name = param_names(np) + regex_search = "^(\s*" + param_name + ":\s+)(<" + param_name + ">)(\s*)(.*)" + regex_replace = lines_final(np) + sed_cmd = "sed -i -r -e " + char_dq + "s|" + regex_search + "|" \ + + regex_replace + "|" + char_dq + " " + wrtcmp_config_fn + sed_output = systemfunc(sed_cmd) + end do +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + out = True + return(out) + +end + diff --git a/ush/NCL/find_wrtcmp_grid_params.ncl b/ush/NCL/find_wrtcmp_grid_params.ncl new file mode 100644 index 0000000000..149b12f895 --- /dev/null +++ b/ush/NCL/find_wrtcmp_grid_params.ncl @@ -0,0 +1,294 @@ +; +; ********************************************************************** +; +; Declare global variables before loading files. This has the same ef- +; fect as declaring these variables on the command line. +; +; ********************************************************************** +; +;help = True + +expt_dir = "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/test_latest_20191002/expt_dirs/test_sheleg_GSD_HRRR3km_01" + +; +; ********************************************************************** +; +; Load external files. 
+;
+; **********************************************************************
+;
+lib_location = "lib/"
+
+loadscript(lib_location + "special_chars.ncl")
+loadscript(lib_location + "pause.ncl")
+loadscript(lib_location + "strcmp_exact.ncl")
+
+load "calc_wrtcmp_grid_params_rotated_latlon.ncl"
+load "calc_wrtcmp_grid_params_lambert_cnfrml.ncl"
+load "read_FV3SAR_grid_native.ncl"
+
+begin
+;
+; **********************************************************************
+;
+; Set the name of the current script or function.  We have to do this
+; manually because NCL does not seem to have a built-in method of ob-
+; taining this information.
+;
+; **********************************************************************
+;
+  curnt_script_proc_func_name := "find_wrtcmp_grid_params(...)"
+;
+; **********************************************************************
+;
+; Set usage message.
+;
+; **********************************************************************
+;
+  usage_msg = \
+" ncl -n find_wrtcmp_grid_params.ncl \" + char_nl + \
+" need to fill in the rest of this message"
+;
+; **********************************************************************
+;
+; Set help message.  The help message contains the documentation for
+; this script and thus should reflect any changes to the code.
+;
+; **********************************************************************
+;
+  help_msg = char_nl + \
+"Need to fill in this help message."
+;
+; **********************************************************************
+;
+; If the variable "help" is specified on the command line and is set to
+; True, print out the help message and exit.
+;
+; **********************************************************************
+;
+  if (isvar("help")) then
+    if (help .eq. True) then
+      print("" + help_msg)
+      exit
+    end if
+  else
+    help = False
+  end if
+;
+; **********************************************************************
+;
+; Set the full path to the variable definitions file for this experiment.
+; Then read various parameters from it. +; +; ********************************************************************** +; + var_defns_fp = expt_dir + "/var_defns.sh" + + param_names = (/ \ +"WRTCMP_PARAMS_TEMPLATE_FP", \ +"WRTCMP_output_grid", \ +"gtype", \ +"CRES", \ +"delx", \ +"dely", \ +"nh4_T7" /) + + num_params = dimsizes(param_names) + do np=0, num_params-1 + + param_name = param_names(np) + regex_search = "^\s*" + param_name + "=(" + char_dq + "([^" \ + + char_dq + "]+)" + char_dq + "|([^ " + char_dq + "]+))(.*)$" + regex_print = "\2\3" + sed_cmd = "sed --regexp-extended --silent --expression " + char_sq \ + + "s/" + regex_search + "/" + regex_print + "/p" + char_sq \ + + " " + var_defns_fp + sed_output = systemfunc(sed_cmd) +; +; Convert the output from the sed command (which will be a string) to +; the appropriate NCL data type. +; + if (strcmp_exact(param_name, "WRTCMP_PARAMS_TEMPLATE_FP")) then + WRTCMP_PARAMS_TEMPLATE_FP = tostring(sed_output) + else if (strcmp_exact(param_name, "WRTCMP_output_grid")) then + WRTCMP_output_grid = tostring(sed_output) + else if (strcmp_exact(param_name, "gtype")) then + gtype = tostring(sed_output) + else if (strcmp_exact(param_name, "CRES")) then + CRES = tostring(sed_output) + else if (strcmp_exact(param_name, "delx")) then + dx_native = todouble(sed_output) + else if (strcmp_exact(param_name, "dely")) then + dy_native = todouble(sed_output) + else if (strcmp_exact(param_name, "nh4_T7")) then + nhalo_T7 = tointeger(sed_output) + else + + msg := char_nl + \ +"ERROR: " + curnt_script_proc_func_name + ":" + char_nl + \ +"The data type to convert the current variable defintions file parameter" + char_nl + \ +"to has not been spedified:" + char_nl + \ +" param_name = " + char_dq + param_name + char_dq + char_nl + \ +"Stopping." 
+ char_nl + print("" + msg) + exit + + end if + end if + end if + end if + end if + end if + end if + + end do +; +; ********************************************************************** +; +; Check that gtype has the proper value. +; +; ********************************************************************** +; + if (strcmp_exact(gtype, "regional")) then + + inds_tiles_to_plot = (/ 7 /) + + else + + msg := char_nl + \ +"ERROR: " + curnt_script_proc_func_name + ":" + char_nl + \ +"This script is designed to handle only regional grids. Thus, gtype" + char_nl + \ +"may only be set to " + char_dq + "regional" + char_dq + ":" + char_nl + \ +" gtype = " + char_dq + gtype + char_dq + char_nl + \ +"Stopping." + char_nl + + print("" + msg) + exit + + end if +; +; ********************************************************************** +; +; Read in the native FV3SAR grid. +; +; ********************************************************************** +; + remove_rgnl_halo = True + get_tile_bdies = False + + grid_info := read_FV3SAR_grid_native( \ + expt_dir, \ + gtype, \ + CRES, \ + inds_tiles_to_plot, \ + get_tile_bdies, \ + nhalo_T7, \ + remove_rgnl_halo) + + lon_tile_cntr_tiles_to_plot = grid_info@lon_tile_cntr_all_tiles + lat_tile_cntr_tiles_to_plot = grid_info@lat_tile_cntr_all_tiles + + lon_tile_corners_face_midpts_tiles_to_plot \ + = grid_info@lon_tile_corners_face_midpts_all_tiles + lat_tile_corners_face_midpts_tiles_to_plot \ + = grid_info@lat_tile_corners_face_midpts_all_tiles +; +; ********************************************************************** +; +; Set the full path to the file that will contain a copy of the write- +; component template file with placeholders replaced with actual values. +; Then copy the template file to that file. 
+; +; ********************************************************************** +; + wrtcmp_config_fp = expt_dir + "/aaaa" + sys_output = systemfunc("cp " + WRTCMP_PARAMS_TEMPLATE_FP + \ + " " + wrtcmp_config_fp) +; +; ********************************************************************** +; +; Extract and save into new, appropriately dimensioned variables the +; output from the grid-read operation above. Then call the function +; that calculates the write-component grid parameters for a lambert- +; conformal grid that is guaranteed to lie completely inside the native +; FV3SAR grid. +; +; ********************************************************************** +; + nn = 0 + + lon_grid_cntr_native = lon_tile_cntr_tiles_to_plot(nn) + lat_grid_cntr_native = lat_tile_cntr_tiles_to_plot(nn) + + lon_tile_corners_face_midpts_native \ + := lon_tile_corners_face_midpts_tiles_to_plot(nn,:) + lat_tile_corners_face_midpts_native \ + := lat_tile_corners_face_midpts_tiles_to_plot(nn,:) + + angle_units = "deg" + + valid_vals_WRTCMP_output_grid \ + := (/ "rotated_latlon", "lambert_conformal" /) +;WRTCMP_output_grid = "rotated_latlon" + + if (strcmp_exact(valid_vals_WRTCMP_output_grid, WRTCMP_output_grid)) then + + if (strcmp_exact(WRTCMP_output_grid, "rotated_latlon")) then + + out := calc_wrtcmp_grid_params_rotated_latlon( \ + wrtcmp_config_fp, \ + lon_grid_cntr_native, lat_grid_cntr_native, \ + lon_tile_corners_face_midpts_native, \ + lat_tile_corners_face_midpts_native, \ + dx_native, dy_native, \ + angle_units) + + else if (strcmp_exact(WRTCMP_output_grid, "lambert_conformal")) then + + out := calc_wrtcmp_grid_params_lambert_cnfrml( \ + wrtcmp_config_fp, \ + lon_grid_cntr_native, lat_grid_cntr_native, \ + lon_tile_corners_face_midpts_native, \ + lat_tile_corners_face_midpts_native, \ + dx_native, dy_native, \ + angle_units) + +; else if (strcmp_exact(WRTCMP_output_grid, "rotated_latlon")) then +; +; msg := char_nl + \ +;"ERROR: " + curnt_script_proc_func_name + ":" + char_nl + \ 
+;"Function to calculate write-component output grid parameters for an " + char_nl + \ +;"output grid of type " + char_dq + "rotated_latlon" + char_dq + " has " + \ +;"not yet been written:" + char_nl + \ +;" WRTCMP_output_grid = " + char_dq + WRTCMP_output_grid + char_dq + char_nl + \ +;"Stopping." + char_nl +; print("" + msg) +; exit + + end if + end if + + else + + valid_vals_str := tostring(valid_vals_WRTCMP_output_grid) + valid_vals_str := str_join(valid_vals_str, char_dq + ", " + char_dq) + valid_vals_str := "(/ " + char_dq + valid_vals_str + char_dq + " /)" + + msg := char_nl + \ +"ERROR: " + curnt_script_proc_func_name + ":" + char_nl + \ +"Specified type of write-component output grid is not currently support-" + char_nl + \ +"ed:" + char_nl + \ +" WRTCMP_output_grid = " + char_dq + WRTCMP_output_grid + char_dq + char_nl + \ +"Currently, this script can generate write-component parameters only for" + char_nl + \ +"output grids of the following types:" + char_nl + \ +" valid_vals_WRTCMP_output_grid = " + valid_vals_str + char_nl + \ +"Stopping." 
+ char_nl + print("" + msg) + exit + + end if + + print("") + print("Done calculating write-component grid parameters.") + +end diff --git a/ush/NCL/get_wrtcmp_grid.ncl b/ush/NCL/get_wrtcmp_grid.ncl index 48dc251a81..e24e333b56 100644 --- a/ush/NCL/get_wrtcmp_grid.ncl +++ b/ush/NCL/get_wrtcmp_grid.ncl @@ -7,8 +7,9 @@ ; loadscript(lib_location + "pause.ncl") loadscript(lib_location + "constants.ncl") -loadscript(lib_location + "calc_lambert_cnfrml_coords_from_sphr.ncl") -loadscript(lib_location + "calc_sphr_coords_from_lambert_cnfrml.ncl") +loadscript(lib_location + "convert_from_to_sphr_coords_to_from_rotated_sphr.ncl") +loadscript(lib_location + "convert_sphr_coords_to_lambert_cnfrml.ncl") +loadscript(lib_location + "convert_lambert_cnfrml_coords_to_sphr.ncl") undef("get_wrtcmp_grid") @@ -18,7 +19,7 @@ function get_wrtcmp_grid( \ local proj_params, \ var_name, regex_search, regex_print, sed_cmd, \ - output_coord_sys, valid_vals_output_coord_sys, valid_vals, msg, \ + wrtcmp_coord_sys, valid_vals_wrtcmp_coord_sys, valid_vals, msg, \ param_names, coord_data_type, num_params, param_name, \ lon_ctr_rad, lat_ctr_rad, lat1_rad, lat2_rad, \ nxm, nyp, lon_cell_cntr_SW, lat_cell_cntr_SW, dx, dy, \ @@ -48,7 +49,8 @@ begin ; ; ********************************************************************** ; -; Get the coordinate system used by the write-component output grid. +; Get the coordinate system in which the write-component output grid is +; specified. 
; ; ********************************************************************** ; @@ -57,7 +59,7 @@ begin regex_print = "\1" sed_cmd = "sed -r -n -e " + char_dq + "s|" + regex_search + "|" \ + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn - output_coord_sys = systemfunc(sed_cmd) + wrtcmp_coord_sys = systemfunc(sed_cmd) ; ; ********************************************************************** ; @@ -65,19 +67,19 @@ begin ; ; ********************************************************************** ; - valid_vals_output_coord_sys = (/ "rotated_latlon", "lambert_conformal" /) + valid_vals_wrtcmp_coord_sys = (/ "rotated_latlon", "lambert_conformal" /) - if (.not. strcmp_exact(valid_vals_output_coord_sys, output_coord_sys)) then + if (.not. strcmp_exact(valid_vals_wrtcmp_coord_sys, wrtcmp_coord_sys)) then valid_vals \ := char_dq \ - + str_join(valid_vals_output_coord_sys, char_dq + ", " + char_dq) \ + + str_join(valid_vals_wrtcmp_coord_sys, char_dq + ", " + char_dq) \ + char_dq msg := char_nl + \ -"The ouput coordinate system (output_coord_sys) is not set to a valid " + char_nl + \ -"value:" + char_nl + \ -" output_coord_sys = " + char_dq + output_coord_sys + char_dq + char_nl + \ +"The coordinate system in which the write-component output grid is spe-" + char_nl + \ +"cified (wrtcmp_coord_sys) has not been set to a valid value:" + char_nl + \ +" wrtcmp_coord_sys = " + char_dq + wrtcmp_coord_sys + char_dq + char_nl + \ "Valid values are:" + char_nl + \ " " + valid_vals + char_nl + \ "Please rerun with a valid grid type. Stopping." 
@@ -93,9 +95,9 @@ begin ; ********************************************************************** ; msg := char_nl + \ -"The ouput coordinate system (output_coord_sys) used by the write-compo-" + char_nl + \ -"nent is:" + char_nl + \ -" output_coord_sys = " + char_dq + output_coord_sys + char_dq + char_nl +"The coordinate system in which the write-component output grid is spe-" + char_nl + \ +"cified (wrtcmp_coord_sys) is:" + char_nl + \ +" wrtcmp_coord_sys = " + char_dq + wrtcmp_coord_sys + char_dq + char_nl print("" + msg) ; ; ********************************************************************** @@ -106,7 +108,19 @@ begin ; ; ********************************************************************** ; - if (strcmp_exact(output_coord_sys, "lambert_conformal")) then + if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then + + param_names = (/ \ + "cen_lon", \ + "cen_lat", \ + "lon1", \ + "lat1", \ + "lon2", \ + "lat2", \ + "dlon", \ + "dlat" /) + + else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then param_names = (/ \ "cen_lon", \ @@ -123,19 +137,21 @@ begin else msg := char_nl + \ -"param_names has not been set for this output coordinate system:" + char_nl + \ -" output_coord_sys = " + char_dq + output_coord_sys + char_dq + char_nl + \ +"param_names has not been set for this value of the write-component out-" + char_nl + \ +"put grid coordinate system (wrtcmp_coord_sys):" + char_nl + \ +" wrtcmp_coord_sys = " + char_dq + wrtcmp_coord_sys + char_dq + char_nl + \ "Stopping." print("" + msg) exit end if + end if ; ; ********************************************************************** ; ; Set the data type (i.e. float or double) of the coordinate arrays for ; the write-component output grid to be "double". Note that here, we -; constructing this grid from the "grid" parameters, so we can choose +; are constructing this grid from the "grid" parameters, so we can choose ; this data type to be whatever we like (i.e. "float" or "double"). 
; ; ********************************************************************** @@ -151,6 +167,11 @@ begin ; num_params = dimsizes(param_names) + msg = char_nl + \ +"Reading in write-component output grid coordinate system parameters " + char_nl + \ +"from file wrtcmp_config_fn:" + char_nl + \ +" wrtcmp_config_fn = " + char_dq + wrtcmp_config_fn + char_dq + do np=0, num_params-1 param_name = param_names(np) @@ -160,89 +181,173 @@ begin + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn sed_output = systemfunc(sed_cmd) - if (strcmp_exact(param_name, "cen_lon")) then - lon_ctr := totype(sed_output, coord_data_type) - else if (strcmp_exact(param_name, "cen_lat")) then - lat_ctr := totype(sed_output, coord_data_type) - else if (strcmp_exact(param_name, "stdlat1")) then - lat1 := totype(sed_output, coord_data_type) - else if (strcmp_exact(param_name, "stdlat2")) then - lat2 := totype(sed_output, coord_data_type) - else if (strcmp_exact(param_name, "nx")) then -; nxp := totype(sed_output, "integer") - nx := totype(sed_output, "integer") - else if (strcmp_exact(param_name, "ny")) then -; nyp := totype(sed_output, "integer") - ny := totype(sed_output, "integer") - else if (strcmp_exact(param_name, "lon1")) then - lon_cell_cntr_SW := totype(sed_output, coord_data_type) - else if (strcmp_exact(param_name, "lat1")) then - lat_cell_cntr_SW := totype(sed_output, coord_data_type) - else if (strcmp_exact(param_name, "dx")) then - dx := totype(sed_output, coord_data_type) - else if (strcmp_exact(param_name, "dy")) then - dy := totype(sed_output, coord_data_type) + unrecognized_param = False + + if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then + + if (strcmp_exact(param_name, "cen_lon")) then + lon_ctr := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "cen_lat")) then + lat_ctr := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "lon1")) then + rot_lon_cell_cntr_SW := totype(sed_output, coord_data_type) + else if 
(strcmp_exact(param_name, "lat1")) then + rot_lat_cell_cntr_SW := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "lon2")) then + rot_lon_cell_cntr_NE := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "lat2")) then + rot_lat_cell_cntr_NE := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "dlon")) then + dlon := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "dlat")) then + dlat := totype(sed_output, coord_data_type) + else + unrecognized_param = True + end if + end if + end if + end if + end if + end if + end if + end if + + else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then + + if (strcmp_exact(param_name, "cen_lon")) then + lon_ctr := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "cen_lat")) then + lat_ctr := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "stdlat1")) then + lat1 := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "stdlat2")) then + lat2 := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "nx")) then + nx := totype(sed_output, "integer") + else if (strcmp_exact(param_name, "ny")) then + ny := totype(sed_output, "integer") + else if (strcmp_exact(param_name, "lon1")) then + lon_cell_cntr_SW := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "lat1")) then + lat_cell_cntr_SW := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "dx")) then + dx := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "dy")) then + dy := totype(sed_output, coord_data_type) + else + unrecognized_param = True + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + else msg := char_nl + \ -"Unknown parameter name specified for given output_coord_sys:" + char_nl + \ -" output_coord_sys = " + char_dq + output_coord_sys + char_dq + char_nl + \ -" param_name = " + 
char_dq + param_name + char_dq + char_nl + \ +"Grid parameters have not yet been specified for this value of the " + char_nl + \ +"write-component output grid coordinate system (wrtcmp_coord_sys):" + char_nl + \ +" wrtcmp_coord_sys = " + char_dq + wrtcmp_coord_sys + char_dq + char_nl + \ "Stopping." print("" + msg) exit end if end if - end if - end if - end if - end if - end if - end if - end if - end if - - end do ; ; ********************************************************************** ; -; Print out values of parameters read in from the write-component con- -; figuration file. +; ; ; ********************************************************************** ; - if (strcmp_exact(output_coord_sys, "lambert_conformal")) then + if (unrecognized_param) then - print("") - print("" + separator_line) + msg := char_nl + \ +"Unknown parameter name specified for the given write-component output" + char_nl + \ +"grid coordinate system (wrtcmp_coord_sys):" + char_nl + \ +" wrtcmp_coord_sys = " + char_dq + wrtcmp_coord_sys + char_dq + char_nl + \ +" param_name = " + char_dq + param_name + char_dq + char_nl + \ +"Stopping." + print("" + msg) + exit - msg = char_nl + \ -"Values of write-component output grid coordinate system parameters read" + char_nl + \ -"in from the model_configure file are:" + char_nl + \ -" lon_ctr = " + lon_ctr + char_nl + \ -" lat_ctr = " + lat_ctr + char_nl + \ -" lat1 = " + lat1 + char_nl + \ -" lat2 = " + lat2 + char_nl + \ -" nx = " + nx + char_nl + \ -" ny = " + ny + char_nl + \ -" lon_cell_cntr_SW = " + lon_cell_cntr_SW + char_nl + \ -" lat_cell_cntr_SW = " + lat_cell_cntr_SW + char_nl + \ -" dx = " + dx + char_nl + \ -" dy = " + dy + char_nl - print("" + msg) + end if + + end do - else - msg := char_nl + \ -"Values of write-component output grid coordinate system parameters have" + \ -"not been set for this output coordinate system:" + char_nl + \ -" output_coord_sys = " + char_dq + output_coord_sys + char_dq + char_nl + \ -"Stopping." 
- print("" + msg) - exit + if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then + dx = dlon + dy = dlat + nx = (rot_lon_cell_cntr_NE - rot_lon_cell_cntr_SW)/dlon + 1 + nx := round(nx, 3) + ny = (rot_lat_cell_cntr_NE - rot_lat_cell_cntr_SW)/dlat + 1 + ny := round(ny, 3) +print("") +print("nx = " + nx) +print("ny = " + ny) +pause end if + +; +; ********************************************************************** +; +; Print out values of parameters read in from the write-component con- +; figuration file. +; +; ********************************************************************** +; +; msg = char_nl + \ +;"Values of write-component output grid coordinate system parameters read" + char_nl + \ +;"in from the model_configure file are:" +; +; do np=0, num_params-1 +; param_name = param_names(np) +; param_value = $param_name$ +; msg = char_nl + \ +;" " + param_name + " = " + param_value +; end do + +; if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then +; +; else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then +; +; print("") +; print("" + separator_line) +; +; msg = char_nl + \ +;"Values of write-component output grid coordinate system parameters read" + char_nl + \ +;"in from the model_configure file are:" + char_nl + \ +;" lon_ctr = " + lon_ctr + char_nl + \ +;" lat_ctr = " + lat_ctr + char_nl + \ +;" lat1 = " + lat1 + char_nl + \ +;" lat2 = " + lat2 + char_nl + \ +;" nx = " + nx + char_nl + \ +;" ny = " + ny + char_nl + \ +;" lon_cell_cntr_SW = " + lon_cell_cntr_SW + char_nl + \ +;" lat_cell_cntr_SW = " + lat_cell_cntr_SW + char_nl + \ +;" dx = " + dx + char_nl + \ +;" dy = " + dy + char_nl +; print("" + msg) +; +; else +; +; msg := char_nl + \ +;"Grid parameters have not yet been specified for this value of the " + char_nl + \ +;"write-component output grid coordinate system (wrtcmp_coord_sys):" + char_nl + \ +;" wrtcmp_coord_sys = " + char_dq + wrtcmp_coord_sys + char_dq + char_nl + \ +;"Stopping." 
+; print("" + msg) +; exit +; +; end if ; ; ********************************************************************** ; @@ -251,25 +356,27 @@ begin ; ********************************************************************** ; angle_units = "deg" - -; nx = nxp - 1 -; ny = nyp - 1 - ; ; ********************************************************************** ; -; Use the given spherical coordinates (lon_cell_cntr_SW, lat_cell_cntr_SW) of the southwest -; corner of the grid to calculate the Lambert conformal coordinates -; (x_cell_cntr_SW, y_cell_cntr_SW) of that corner. +; Use the given spherical coordinates (lon_cell_cntr_SW, lat_cell_cntr_- +; SW) of the southwest corner of the grid to calculate the Lambert con- +; formal coordinates (x_cell_cntr_SW, y_cell_cntr_SW) of that corner. ; ; ********************************************************************** ; - lambert_coords \ - := calc_lambert_cnfrml_coords_from_sphr( \ - lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ - lon_cell_cntr_SW, lat_cell_cntr_SW) - x_cell_cntr_SW = lambert_coords@x - y_cell_cntr_SW = lambert_coords@y + if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then + x_cell_cntr_SW = rot_lon_cell_cntr_SW + y_cell_cntr_SW = rot_lat_cell_cntr_SW + else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then + lambert_cnfrml_coords \ + := convert_sphr_coords_to_lambert_cnfrml( \ + lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ + lon_cell_cntr_SW, lat_cell_cntr_SW) + x_cell_cntr_SW = lambert_cnfrml_coords@x + y_cell_cntr_SW = lambert_cnfrml_coords@y + end if + end if x_min = x_cell_cntr_SW - 0.5d+0*dx y_min = y_cell_cntr_SW - 0.5d+0*dy @@ -315,11 +422,22 @@ begin ; ; ********************************************************************** ; - sphr_coords := calc_sphr_coords_from_lambert_cnfrml( \ - lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ - x_verts, y_verts) - lon_verts = sphr_coords@lon - lat_verts = sphr_coords@lat + if (strcmp_exact(wrtcmp_coord_sys, 
"rotated_latlon")) then + sphr_coords \ + := convert_from_to_sphr_coords_to_from_rotated_sphr( \ + lon_ctr, lat_ctr, angle_units, -1, \ + x_verts, y_verts) + lon_verts = sphr_coords@lon_out + lat_verts = sphr_coords@lat_out + else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then + sphr_coords \ + := convert_lambert_cnfrml_coords_to_sphr( \ + lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ + x_verts, y_verts) + lon_verts = sphr_coords@lon + lat_verts = sphr_coords@lat + end if + end if ; ; ********************************************************************** ; @@ -349,11 +467,22 @@ begin ; ; ********************************************************************** ; - sphr_coords := calc_sphr_coords_from_lambert_cnfrml( \ - lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ - x_cntrs, y_cntrs) - lon_cntrs = sphr_coords@lon - lat_cntrs = sphr_coords@lat + if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then + sphr_coords \ + := convert_from_to_sphr_coords_to_from_rotated_sphr( \ + lon_ctr, lat_ctr, angle_units, -1, \ + x_cntrs, y_cntrs) + lon_cntrs = sphr_coords@lon_out + lat_cntrs = sphr_coords@lat_out + else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then + sphr_coords \ + := convert_lambert_cnfrml_coords_to_sphr( \ + lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ + x_cntrs, y_cntrs) + lon_cntrs = sphr_coords@lon + lat_cntrs = sphr_coords@lat + end if + end if ; ; ********************************************************************** ; diff --git a/ush/NCL/lib/calc_rotated_sphr_coords_from_sphr.ncl b/ush/NCL/lib/calc_rotated_sphr_coords_from_sphr.ncl new file mode 100644 index 0000000000..4dd627aafe --- /dev/null +++ b/ush/NCL/lib/calc_rotated_sphr_coords_from_sphr.ncl @@ -0,0 +1,104 @@ +; +; ********************************************************************** +; +; File name: calc_rotated_sphr_coords_from_sphr.ncl +; Author: Gerard Ketefian +; +; Description: +; ^^^^^^^^^^^ +; This function calculates the 
Lambert conformal coordinates x and y +; corresponding to the specified spherical coordinates lon (longitude) +; and lat (latitude). +; +; ********************************************************************** +; +loadscript(lib_location + "calc_rotated_sphr_proj_auxil_params.ncl") + +undef("calc_rotated_sphr_coords_from_sphr") + +function calc_rotated_sphr_coords_from_sphr( \ + lon_ctr:snumeric, lat_ctr:snumeric, \ + lat1:snumeric, lat2:snumeric, radius:snumeric, \ + angle_units:string, \ + lon:snumeric, lat:snumeric) + +local proj_params, \ + lon_ctr_rad, lat_ctr_rad, lat1_rad, lat2_rad, \ + n, F, rho_ctr, \ + angle_units_short_lwrcase, \ + lon_rad, lat_rad, rho, n_dlon_rad, \ + x, y, coords_out + +begin +; +; ********************************************************************** +; +; Set the name of the current script or function. We have to do this +; manually because NCL does not seem to have a built-in method of ob- +; taining this information. +; +; ********************************************************************** +; + curnt_script_proc_func_name \ + := "function calc_rotated_sphr_coords_from_sphr" +; +; ********************************************************************** +; +; Calculate the auxiliary Lambert conformal map projection parameters +; that are needed in the calculation of the Lambert conformal coordi- +; nates (x,y) below. +; +; ********************************************************************** +; + proj_params := calc_rotated_sphr_proj_auxil_params( \ + lon_ctr, lat_ctr, lat1, lat2, radius, angle_units) + + lon_ctr_rad = proj_params@lon_ctr_rad + lat_ctr_rad = proj_params@lat_ctr_rad + lat1_rad = proj_params@lat1_rad + lat2_rad = proj_params@lat2_rad + n = proj_params@n + F = proj_params@F + rho_ctr = proj_params@rho_ctr +; +; ********************************************************************** +; +; If necessary, convert longitude and latitude from degrees to radians. 
+; +; ********************************************************************** +; + angle_units_short_lwrcase = str_lower(str_get_cols(angle_units, 0, 2)) + if (strcmp_exact(angle_units_short_lwrcase, "deg")) then + lon_rad = lon*rads_per_deg + lat_rad = lat*rads_per_deg + else + lon_rad = lon + lat_rad = lat + end if +; +; ********************************************************************** +; +; Calculate the Lambert conformal coordinates (x,y) using the projection +; parameters obtained above and the specified spherical coordinates. +; +; ********************************************************************** +; + rho = F/((tan(0.25d+0*pi_geom + 0.5d+0*lat_rad))^n) + n_dlon_rad = n*(lon_rad - lon_ctr_rad) + x = rho*sin(n_dlon_rad) + y = rho_ctr - rho*cos(n_dlon_rad) +; +; ********************************************************************** +; +; Return results as attributes of the logical variable coords_out. +; +; ********************************************************************** +; + coords_out = True + coords_out@x = x + coords_out@y = y + return(coords_out) + +end + + diff --git a/ush/NCL/lib/calc_sphr_coords_from_rotated_sphr.ncl b/ush/NCL/lib/calc_sphr_coords_from_rotated_sphr.ncl new file mode 100644 index 0000000000..e01b95622d --- /dev/null +++ b/ush/NCL/lib/calc_sphr_coords_from_rotated_sphr.ncl @@ -0,0 +1,105 @@ +; +; ********************************************************************** +; +; File name: calc_sphr_coords_from_rotated_sphr.ncl +; Author: Gerard Ketefian +; +; Description: +; ^^^^^^^^^^^ +; This function calculates the spherical coordinates (longitude and la- +; titude) corresponding to the specified Lambert conformal coordinates +; x and y. 
+; +; ********************************************************************** +; +loadscript(lib_location + "calc_rotated_sphr_proj_auxil_params.ncl") + +undef("calc_sphr_coords_from_rotated_sphr") + +function calc_sphr_coords_from_rotated_sphr( \ + lon_ctr:snumeric, lat_ctr:snumeric, \ + lat1:snumeric, lat2:snumeric, radius:snumeric, \ + angle_units:string, \ + x:snumeric, y:snumeric) + +local proj_params, \ + lon_ctr_rad, lat_ctr_rad, lat1_rad, lat2_rad, \ + n, F, rho_ctr, \ + rho_ctr_mns_y, rho, theta, lon_rad, lat_rad, \ + angle_units_short_lwr, lon, lat + +begin +; +; ********************************************************************** +; +; Set the name of the current script or function. We have to do this +; manually because NCL does not seem to have a built-in method of ob- +; taining this information. +; +; ********************************************************************** +; + curnt_script_proc_func_name \ + := "function calc_sphr_coords_from_rotated_sphr" +; +; ********************************************************************** +; +; Calculate the auxiliary Lambert conformal map projection parameters +; thar are needed in the calculation of the spherical coordinates (lon, +; lat) below. +; +; ********************************************************************** +; + proj_params := calc_rotated_sphr_proj_auxil_params( \ + lon_ctr, lat_ctr, lat1, lat2, radius, angle_units) + + lon_ctr_rad = proj_params@lon_ctr_rad + lat_ctr_rad = proj_params@lat_ctr_rad + lat1_rad = proj_params@lat1_rad + lat2_rad = proj_params@lat2_rad + n = proj_params@n + F = proj_params@F + rho_ctr = proj_params@rho_ctr +; +; ********************************************************************** +; +; Calculate the spherical coordinates (lon_rad,lat_rad) using the pro- +; jection parameters obtained above and the specified Lambert conformal +; coordinates. Note that lon_rad and lat_rad are in units of radians. 
+; +; ********************************************************************** +; + rho_ctr_mns_y = rho_ctr - y + rho = sign_matlab(n)*sqrt(x^2 + rho_ctr_mns_y^2) + theta = atan(x/rho_ctr_mns_y) + lon_rad = lon_ctr_rad + theta/n + lat_rad = 2.0d+0*atan((F/rho)^(1.0d+0/n)) - 0.5d+0*pi_geom +; +; ********************************************************************** +; +; If necessary, convert angles from radians to degrees. +; +; ********************************************************************** +; + angle_units_short_lwr = str_lower(str_get_cols(angle_units, 0, 2)) + if (strcmp_exact(angle_units_short_lwr, "deg")) then + lon = lon_rad*degs_per_rad + lat = lat_rad*degs_per_rad + else + lon = lon_rad + lat = lat_rad + end if +; +; ********************************************************************** +; +; Return results as attributes of the logical variable coords_out. +; +; ********************************************************************** +; + coords_out = True + coords_out@lon = lon + coords_out@lat = lat + return(coords_out) + +end + + diff --git a/ush/NCL/lib/convert_from_to_sphr_coords_to_from_rotated_sphr.ncl b/ush/NCL/lib/convert_from_to_sphr_coords_to_from_rotated_sphr.ncl new file mode 100644 index 0000000000..9de7d9cc94 --- /dev/null +++ b/ush/NCL/lib/convert_from_to_sphr_coords_to_from_rotated_sphr.ncl @@ -0,0 +1,162 @@ +; +; ********************************************************************** +; +; File name: convert_from_to_sphr_coords_to_from_rotated_sphr.ncl +; Author: Gerard Ketefian +; +; Description: +; ^^^^^^^^^^^ +; This function transforms the given longitudes and latitudes from sphe- +; rical to rotated spherical coordinates or vice versa. The positive X- +; axis of the rotated spherical coordinate system intersects the sphere +; at the (non-rotated) longitude and latitude (lon0, lat0). +; +; The input argument dir determines the direction of the transformation, +; as follows. 
If dir is set to 1, this function assumes that the input +; longitudes and latitudes (lon_in, lat_in) are in non-rotated spherical coor- +; dinates and calculates the corresponding output longitudes and lati- +; tudes (lon_out, lat_out) in rotated spherical coordinates. If dir is +; set to -1, this function assumes that (lon_in, lat_in) are specified in ro- +; tated coordinates and calculates the corresponding output coordinates +; (lon_out, lat_out) in non-rotated spherical coordinates. No other +; values of dir are allowed. +; +; ********************************************************************** +; +undef("convert_from_to_sphr_coords_to_from_rotated_sphr") + +function convert_from_to_sphr_coords_to_from_rotated_sphr( \ + lon0:snumeric, lat0:snumeric, \ + angle_units:string, dir:integer, \ + lon_in:snumeric, lat_in:snumeric) + +local lon0_rad, lat0_rad, \ + lon_in_rad, lat_in_rad, \ + lon_in_offset_rad, pmlat0_rad, msg, \ + sin_lon_in_offset, cos_lon_in_offset, \ + sin_pmlat0_rad, cos_pmlat0_rad, \ + sin_lat_in, cos_lat_in, tan_lat_in, \ + numer_atan, denom_atan, \ + lon_out, lat_out, coords_out + +begin +; +; ********************************************************************** +; +; If necessary, convert longitude and latitude from degrees to radians. +; +; ********************************************************************** +; + angle_units_short_lwrcase = str_lower(str_get_cols(angle_units, 0, 2)) + if (strcmp_exact(angle_units_short_lwrcase, "deg")) then + lon0_rad = lon0*rads_per_deg + lat0_rad = lat0*rads_per_deg + lon_in_rad = lon_in*rads_per_deg + lat_in_rad = lat_in*rads_per_deg + else + lon0_rad = lon0 + lat0_rad = lat0 + lon_in_rad = lon_in + lat_in_rad = lat_in + end if +; +; ********************************************************************** +; * +; +; * +; ********************************************************************** +; + if (dir .eq. 1) then + lon_in_offset_rad = lon_in_rad - lon0_rad + pmlat0_rad = lat0_rad + else if (dir .eq. 
-1) then + lon_in_offset_rad = lon_in_rad + pmlat0_rad = -lat0_rad + else + msg := char_nl + \ +"Disallowed value specified for dir:" + char_nl + \ +" dir = " + dir + char_nl + \ +"Set dir to 1 to transform from spherical to rotated spherical coordi-" + char_nl + \ +"nates, or set dir to -1 to transform from rotated spherical to spheri-" + char_nl + \ +"cal coordinates." + char_nl + \ +"Stopping." + print(msg + "") + exit + end if + end if +; +; ********************************************************************** +; * +; Calculate sines, cosines, and tangents of various angles. +; * +; ********************************************************************** +; + sin_lon_in_offset = sin(lon_in_offset_rad) + cos_lon_in_offset = cos(lon_in_offset_rad) + + sin_pmlat0_rad = sin(pmlat0_rad) + cos_pmlat0_rad = cos(pmlat0_rad) + sin_lat_in = sin(lat_in_rad) + cos_lat_in = cos(lat_in_rad) + tan_lat_in = sin_lat_in/cos_lat_in +; +; ********************************************************************** +; * +; Calculate the transformed longitude lon_out. +; * +; ********************************************************************** +; + numer_atan = sin_lon_in_offset + denom_atan = sin_pmlat0_rad*tan_lat_in + cos_pmlat0_rad*cos_lon_in_offset + lon_out = atan2(numer_atan, denom_atan) + if (dir .eq. -1) then + lon_out = lon_out + lon0_rad + end if +; +; ********************************************************************** +; * +; Make sure that the output value of longitude are in the range -pi <= +; lon < pi. +; * +; ********************************************************************** +; + lon_out := where(lon_out .lt. -pi_geom, lon_out + 2.0d+0*pi_geom, lon_out) + lon_out := where(lon_out .ge. pi_geom, lon_out - 2.0d+0*pi_geom, lon_out) +; +; ********************************************************************** +; * +; Calculate the transformed latitude lat_out. 
+; * +; ********************************************************************** +; + lat_out = asin(cos_pmlat0_rad*sin_lat_in \ + - sin_pmlat0_rad*cos_lon_in_offset*cos_lat_in) +; +; ********************************************************************** +; +; If necessary, convert output longitude and latitude from radians back +; to degrees. +; +; ********************************************************************** +; + if (strcmp_exact(angle_units_short_lwrcase, "deg")) then + lon_out = lon_out*degs_per_rad + lat_out = lat_out*degs_per_rad + end if +; +; ********************************************************************** +; +; Return results as attributes of the logical variable coords_out. +; +; ********************************************************************** +; + coords_out = True + coords_out@lon_out = lon_out + coords_out@lat_out = lat_out + return(coords_out) + +end + + + + diff --git a/ush/NCL/lib/calc_sphr_coords_from_lambert_cnfrml.ncl b/ush/NCL/lib/convert_lambert_cnfrml_coords_to_sphr.ncl similarity index 93% rename from ush/NCL/lib/calc_sphr_coords_from_lambert_cnfrml.ncl rename to ush/NCL/lib/convert_lambert_cnfrml_coords_to_sphr.ncl index a0fee334c0..6cd0bd5f6c 100644 --- a/ush/NCL/lib/calc_sphr_coords_from_lambert_cnfrml.ncl +++ b/ush/NCL/lib/convert_lambert_cnfrml_coords_to_sphr.ncl @@ -1,7 +1,7 @@ ; ; ********************************************************************** ; -; File name: calc_sphr_coords_from_lambert_cnfrml.ncl +; File name: convert_lambert_cnfrml_coords_to_sphr.ncl ; Author: Gerard Ketefian ; ; Description: @@ -14,9 +14,9 @@ ; loadscript(lib_location + "calc_lambert_cnfrml_proj_auxil_params.ncl") -undef("calc_sphr_coords_from_lambert_cnfrml") +undef("convert_lambert_cnfrml_coords_to_sphr") -function calc_sphr_coords_from_lambert_cnfrml( \ +function convert_lambert_cnfrml_coords_to_sphr( \ lon_ctr:snumeric, lat_ctr:snumeric, \ lat1:snumeric, lat2:snumeric, radius:snumeric, \ angle_units:string, \ @@ -39,7 +39,7 @@ begin ; 
********************************************************************** ; curnt_script_proc_func_name \ - := "function calc_sphr_coords_from_lambert_cnfrml" + := "function convert_lambert_cnfrml_coords_to_sphr" ; ; ********************************************************************** ; diff --git a/ush/NCL/lib/calc_lambert_cnfrml_coords_from_sphr.ncl b/ush/NCL/lib/convert_sphr_coords_to_lambert_cnfrml.ncl similarity index 93% rename from ush/NCL/lib/calc_lambert_cnfrml_coords_from_sphr.ncl rename to ush/NCL/lib/convert_sphr_coords_to_lambert_cnfrml.ncl index c78b88227a..572147bd32 100644 --- a/ush/NCL/lib/calc_lambert_cnfrml_coords_from_sphr.ncl +++ b/ush/NCL/lib/convert_sphr_coords_to_lambert_cnfrml.ncl @@ -1,7 +1,7 @@ ; ; ********************************************************************** ; -; File name: calc_lambert_cnfrml_coords_from_sphr.ncl +; File name: convert_sphr_coords_to_lambert_cnfrml.ncl ; Author: Gerard Ketefian ; ; Description: @@ -14,9 +14,9 @@ ; loadscript(lib_location + "calc_lambert_cnfrml_proj_auxil_params.ncl") -undef("calc_lambert_cnfrml_coords_from_sphr") +undef("convert_sphr_coords_to_lambert_cnfrml") -function calc_lambert_cnfrml_coords_from_sphr( \ +function convert_sphr_coords_to_lambert_cnfrml( \ lon_ctr:snumeric, lat_ctr:snumeric, \ lat1:snumeric, lat2:snumeric, radius:snumeric, \ angle_units:string, \ @@ -40,7 +40,7 @@ begin ; ********************************************************************** ; curnt_script_proc_func_name \ - := "function calc_lambert_cnfrml_coords_from_sphr" + := "function convert_sphr_coords_to_lambert_cnfrml" ; ; ********************************************************************** ; diff --git a/ush/NCL/plot_FV3SAR_field_native.ncl b/ush/NCL/plot_FV3SAR_field_native.ncl index 85299115bd..db511b9dd0 100644 --- a/ush/NCL/plot_FV3SAR_field_native.ncl +++ b/ush/NCL/plot_FV3SAR_field_native.ncl @@ -18,10 +18,10 @@ ;help = True -run_dir = 
"/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/FV3SAR_demo_PASPBL_20190725/expt_dirs/test_GSD_HRRR25km" -run_dir = "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/FV3SAR_demo_PASPBL_20190725/expt_dirs/test_GSD_HRRR13km" -run_dir = "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/FV3SAR_demo_PASPBL_20190725/expt_dirs/test_GSD_HRRR3km" -run_dir = "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/FV3SAR_demo_PASPBL_20190725/expt_dirs/test_GSD_HAFSV0.A" +expt_dir = "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/FV3SAR_demo_PASPBL_20190725/expt_dirs/test_GSD_HRRR25km" +expt_dir = "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/FV3SAR_demo_PASPBL_20190725/expt_dirs/test_GSD_HRRR13km" +expt_dir = "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/FV3SAR_demo_PASPBL_20190725/expt_dirs/test_GSD_HRRR3km" +expt_dir = "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/FV3SAR_demo_PASPBL_20190725/expt_dirs/test_GSD_HAFSV0.A" tile_inds := (/ 1, 7, 4 /) @@ -159,7 +159,7 @@ field_name_RAP = "sqrt_cell_area" ; This is for HRRR grib2 file. field_name_RAP = "VGTYP_P0_L1_GLC0" -RAP_dir = "/scratch3/BMC/fim/Gerard.Ketefian/regional_FV3_EMC_visit_20180509" +RAP_dir = "/scratch1/BMC/fim/Gerard.Ketefian/regional_FV3_EMC_visit_20180509" ;RAP_grid_fn = "/home/Gerard/fv3_regional/fv3sar_workflow/ush/NCL/plot_grid.ncl" ;RAP_grid_fn = "/scratch3/BMC/fim/Gerard.Ketefian/regional_FV3_EMC_visit_20180509/geo_em.d01.RAP.nc" ;RAP_grid_fn = RAP_dir + "/geo_em.d01.RAP.nc" @@ -465,8 +465,7 @@ end if num_tiles_to_plot = plot_params@num_tiles_to_plot inds_tiles_to_plot = plot_params@inds_tiles_to_plot cres = plot_params@cres - run_dir = plot_params@run_dir - work_dir = plot_params@work_dir + expt_dir = plot_params@expt_dir horiz_dist_units = plot_params@horiz_dist_units horiz_area_units = plot_params@horiz_area_units @@ -495,6 +494,75 @@ end if ; ; ********************************************************************** ; +; Get/construct the write-component grid. 
+; +; ********************************************************************** +; + +; These should be at the top and go through the process_...() function. + +; show_wrtcmp_grid = True + show_wrtcmp_grid = False + + show_wrtcmp_bdy = True +; show_wrtcmp_bdy = False + + wrtcmp_bdy_color = "green" + + +; rd := "/scratch3/BMC/det/Gerard.Ketefian/UFS_CAM_test_instructions/expt_dirs/NX1800_NY1120_A0p21423_Kmns0p23209_HRRR_test_cycl_slurm_01/2017090700" +; rd := "/scratch3/BMC/det/Gerard.Ketefian" + rd := run_dir + "/2019052000" + + wrtcmp_config_fn := rd + "/model_configure" +; wrtcmp_config_tmpl_fn = wrtcmp_config_fn + ".tmpl" +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + if (show_wrtcmp_grid .or. show_wrtcmp_bdy) then +print("") +print("wrtcmp_config_fn = " + char_dq + wrtcmp_config_fn + char_dq) +pause + grid_info := get_wrtcmp_grid(wrtcmp_config_fn, show_wrtcmp_bdy) + + nx_wrtcmp = grid_info@nx + ny_wrtcmp = grid_info@ny + lon_cntrs_unstruc_wrtcmp = grid_info@lon_cntrs_unstruc + lat_cntrs_unstruc_wrtcmp = grid_info@lat_cntrs_unstruc + lon_verts_unstruc_wrtcmp = grid_info@lon_verts_unstruc + lat_verts_unstruc_wrtcmp = grid_info@lat_verts_unstruc + lon_bdy_wrtcmp = grid_info@lon_bdy + lat_bdy_wrtcmp = grid_info@lat_bdy + lon_grid_cntr_wrtcmp = grid_info@lon_grid_cntr + lat_grid_cntr_wrtcmp = grid_info@lat_grid_cntr + coord_data_type_wrtcmp = grid_info@coord_data_type + + print("") + print("lon_grid_cntr_wrtcmp = " + lon_grid_cntr_wrtcmp + " deg") + print("lat_grid_cntr_wrtcmp = " + lat_grid_cntr_wrtcmp + " deg") +pause + + + out := calc_wrtcmp_grid_params( \ + wrtcmp_config_fn, \ + lon_ctr_native, lat_ctr_native, \ + lon_tile_corners_face_midpts_native, \ + lat_tile_corners_face_midpts_native, \ + dx_native, dy_native, \ + angle_units) + +print("") +print("out = " + out) +pause + + end if +; +; 
********************************************************************** +; ; Read in the FV3SAR grid. ; ; ********************************************************************** @@ -513,7 +581,7 @@ end if if (read_FV3SAR_wrtcmp) then - FV3SAR_wrtcmp_fn = run_dir + "/" + CDATE + "/" + "dynf000.nc" + FV3SAR_wrtcmp_fn = expt_dir + "/" + CDATE + "/" + "dynf000.nc" get_domain_bdy = True grid_info := read_FV3SAR_grid_wrtcmp( \ FV3SAR_wrtcmp_fn, \ @@ -533,7 +601,7 @@ end if else grid_info := read_FV3SAR_grid_native( \ - work_dir, \ + expt_dir, \ gtype, \ cres, \ inds_tiles_to_plot, \ @@ -621,59 +689,59 @@ end if print("lat_grid_cntr_RAP = " + lat_grid_cntr_RAP + " deg") end if +;; +;; ********************************************************************** +;; +;; Get/construct the write-component grid. +;; +;; ********************************************************************** +;; +; +;; These should be at the top and go through the process_...() function. +; +;; show_wrtcmp_grid = True +; show_wrtcmp_grid = False +; +; show_wrtcmp_bdy = True +;; show_wrtcmp_bdy = False +; +; wrtcmp_bdy_color = "green" +; +; +;; rd := "/scratch3/BMC/det/Gerard.Ketefian/UFS_CAM_test_instructions/expt_dirs/NX1800_NY1120_A0p21423_Kmns0p23209_HRRR_test_cycl_slurm_01/2017090700" +; rd := "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM" +; +; wrtcmp_config_fn := rd + "/model_configure" +; wrtcmp_config_tmpl_fn = wrtcmp_config_fn + ".tmpl" +;; +;; ********************************************************************** +;; +;; +;; +;; ********************************************************************** +;; +; if (show_wrtcmp_grid .or. 
show_wrtcmp_bdy) then +; +; grid_info := get_wrtcmp_grid(wrtcmp_config_fn, show_wrtcmp_bdy) +; +; nx_wrtcmp = grid_info@nx +; ny_wrtcmp = grid_info@ny +; lon_cntrs_unstruc_wrtcmp = grid_info@lon_cntrs_unstruc +; lat_cntrs_unstruc_wrtcmp = grid_info@lat_cntrs_unstruc +; lon_verts_unstruc_wrtcmp = grid_info@lon_verts_unstruc +; lat_verts_unstruc_wrtcmp = grid_info@lat_verts_unstruc +; lon_bdy_wrtcmp = grid_info@lon_bdy +; lat_bdy_wrtcmp = grid_info@lat_bdy +; lon_grid_cntr_wrtcmp = grid_info@lon_grid_cntr +; lat_grid_cntr_wrtcmp = grid_info@lat_grid_cntr +; coord_data_type_wrtcmp = grid_info@coord_data_type +; +; print("") +; print("lon_grid_cntr_wrtcmp = " + lon_grid_cntr_wrtcmp + " deg") +; print("lat_grid_cntr_wrtcmp = " + lat_grid_cntr_wrtcmp + " deg") +;pause ; -; ********************************************************************** -; -; Get/construct the write-component grid. -; -; ********************************************************************** -; - -; These should be at the top and go through the process_...() function. - -; show_wrtcmp_grid = True - show_wrtcmp_grid = False - - show_wrtcmp_bdy = True -; show_wrtcmp_bdy = False - - wrtcmp_bdy_color = "green" - - -; rd := "/scratch3/BMC/det/Gerard.Ketefian/UFS_CAM_test_instructions/expt_dirs/NX1800_NY1120_A0p21423_Kmns0p23209_HRRR_test_cycl_slurm_01/2017090700" - rd := "/scratch3/BMC/det/Gerard.Ketefian" - - wrtcmp_config_fn := rd + "/model_configure" - wrtcmp_config_tmpl_fn = wrtcmp_config_fn + ".tmpl" -; -; ********************************************************************** -; -; -; -; ********************************************************************** -; - if (show_wrtcmp_grid .or. 
show_wrtcmp_bdy) then - - grid_info := get_wrtcmp_grid(wrtcmp_config_fn, show_wrtcmp_bdy) - - nx_wrtcmp = grid_info@nx - ny_wrtcmp = grid_info@ny - lon_cntrs_unstruc_wrtcmp = grid_info@lon_cntrs_unstruc - lat_cntrs_unstruc_wrtcmp = grid_info@lat_cntrs_unstruc - lon_verts_unstruc_wrtcmp = grid_info@lon_verts_unstruc - lat_verts_unstruc_wrtcmp = grid_info@lat_verts_unstruc - lon_bdy_wrtcmp = grid_info@lon_bdy - lat_bdy_wrtcmp = grid_info@lat_bdy - lon_grid_cntr_wrtcmp = grid_info@lon_grid_cntr - lat_grid_cntr_wrtcmp = grid_info@lat_grid_cntr - coord_data_type_wrtcmp = grid_info@coord_data_type - - print("") - print("lon_grid_cntr_wrtcmp = " + lon_grid_cntr_wrtcmp + " deg") - print("lat_grid_cntr_wrtcmp = " + lat_grid_cntr_wrtcmp + " deg") -pause - - end if +; end if ; ; ********************************************************************** ; diff --git a/ush/NCL/process_plot_params.ncl b/ush/NCL/process_plot_params.ncl index ac29977abc..a038c37de6 100644 --- a/ush/NCL/process_plot_params.ncl +++ b/ush/NCL/process_plot_params.ncl @@ -110,29 +110,30 @@ usage_msg + char_nl + \ ; ; ********************************************************************** ; -; Verify that the run directory (run_dir) has been specified on the com- -; mand line. If so, verify that it exists. +; Verify that the experiment directory (expt_dir) has been specified on +; the command line. If so, verify that it exists. ; ; ********************************************************************** ; - if (.not. isvar("run_dir")) then + if (.not. isvar("expt_dir")) then msg := char_nl + \ -"The run directory (run_dir) has not been specified on the command line:" + char_nl + \ -" isvar(" + char_dq + "run_dir" + char_dq + ") = " + isvar("run_dir") + char_nl + \ -"Please rerun with the run directory specified. 
" + \ +"The experiment directory (expt_dir) has not been specified on the com-" + char_nl + \ +"mand line:" + char_nl + \ +" isvar(" + char_dq + "expt_dir" + char_dq + ") = " + isvar("expt_dir") + char_nl + \ +"Please rerun with the experiment directory specified. " + \ example_usage_and_help_str print("" + msg) exit - else if (.not. fileexists(run_dir)) then + else if (.not. fileexists(expt_dir)) then msg := char_nl + \ -"The specified run directory (run_dir) does not exist:" + char_nl + \ -" run_dir = " + char_dq + run_dir + char_dq + char_nl + \ -" fileexists(run_dir) = " + fileexists(run_dir) + char_nl + \ -"Please rerun with an existing run directory. " + \ +"The specified experiment directory (expt_dir) does not exist:" + char_nl + \ +" expt_dir = " + char_dq + expt_dir + char_dq + char_nl + \ +" fileexists(expt_dir) = " + fileexists(expt_dir) + char_nl + \ +"Please rerun with an existing experiment directory. " + \ example_usage_and_help_str print("" + msg) @@ -140,37 +141,16 @@ example_usage_and_help_str end if end if -; -; ********************************************************************** -; -; Set the work directory, i.e. the temporary directory in which the pre- -; processing tasks of the workflow place their output files. -; -; ********************************************************************** -; -; Don't need this line since we got rid of work_dirs and put all prepro- -; cessing output in subdirectories in the experiment directory. -; work_dir = str_sub_str(run_dir, "expt_dirs", "work_dirs") - work_dir = run_dir - - - ; ; ********************************************************************** ; ; Set the full path to the variable definitions file. This file con- -; tains various run parameters that will be needed below. +; tains various experiment and workflow parameters that will be needed +; below. 
; ; ********************************************************************** ; - var_defns_fn = run_dir + "/var_defns.sh" - -; Need to change this to go up one level since we changed the directory -; structure for cycling. -; var_defns_fn = run_dir + "/../var_defns.sh" - - - + var_defns_fn = expt_dir + "/var_defns.sh" ; ; ********************************************************************** ; @@ -178,10 +158,14 @@ example_usage_and_help_str ; ; ********************************************************************** ; - cmd = "sed --silent --regexp-extended --expression " + \ - "'s/(^gtype=)([" + char_dq + "]*)([A-Za-z]+)([" + char_dq + "]*)(.*)/\3/p' " + \ - var_defns_fn + cmd = "sed --silent --regexp-extended --expression " \ + + "'s/(^gtype=)([" + char_dq + "]*)([A-Za-z]+)([" + char_dq + "]*)(.*)/\3/p' " \ + + var_defns_fn gtype := systemfunc(cmd) + +; This is a temporary fix. Need this since we removed "gtype" from the +; variable defintions file of the community workflow. +gtype = "regional" ; ; ********************************************************************** ; @@ -251,15 +235,16 @@ example_usage_and_help_str ; ; ********************************************************************** ; - cmd = "sed --silent --regexp-extended --expression " + \ - "'s/^[ ]*(RES=)([" + char_dq + "]*)([0-9]+)([" + char_dq + "]*)(.*)/\3/p' " + \ - var_defns_fn + cmd = "sed --silent --regexp-extended --expression " \ + + "'s/^[ ]*(RES=)([" + char_dq + "]*)([0-9]+)([" + char_dq + "]*)(.*)/\3/p' " \ + + var_defns_fn res := systemfunc(cmd) -res := res(0) -print("==>> res = " + res) -; res := tointeger(systemfunc(cmd)) -;print("==>> res = " + res) -pause +; +; Get the last occurrence of "RES=..." in the variable defintions file +; since that's the one that matters. 
+; + num_elems = dimsizes(res) + res := res(num_elems-1) ; ; ********************************************************************** ; @@ -301,11 +286,6 @@ end if ; ; cres := "C" + tostring_with_format(res, "%i") cres := "C" + res -print("==>> cres = " + char_dq + cres + char_dq) -pause - - - @@ -1037,8 +1017,7 @@ char_dq + ".halo4" + char_dq + ")] should not be specified on the command line ( plot_params@num_tiles_to_plot = num_tiles_to_plot plot_params@inds_tiles_to_plot = inds_tiles_to_plot plot_params@cres = cres - plot_params@run_dir = run_dir - plot_params@work_dir = work_dir + plot_params@expt_dir = expt_dir plot_params@horiz_dist_units = horiz_dist_units plot_params@horiz_area_units = horiz_area_units diff --git a/ush/NCL/read_FV3SAR_grid_native.ncl b/ush/NCL/read_FV3SAR_grid_native.ncl index 5947bf670d..0905e9e1fe 100644 --- a/ush/NCL/read_FV3SAR_grid_native.ncl +++ b/ush/NCL/read_FV3SAR_grid_native.ncl @@ -14,10 +14,10 @@ ; ; The input arguments to this function are as follows: ; -; work_dir: -; This is the work directory created by the FV3SAR workflow. The grid -; specification files corresponding to the cubed-sphere tiles are within -; subdirectories under this directory. +; expt_dir: +; This is the experiment directory created by the FV3SAR workflow gene- +; ration script. The grid specification files corresponding to the +; cubed-sphere tiles are within subdirectories under this directory. ; ; gtype: ; This is a string containing the grid type of the FV3 cubed-sphere grid @@ -70,7 +70,7 @@ loadscript(lib_location + "adjust_longitude_range.ncl") undef("read_FV3SAR_grid_native") function read_FV3SAR_grid_native( \ - work_dir:string, \ + expt_dir:string, \ gtype:string, \ cres:string, \ tile_inds:integer, \ @@ -230,11 +230,10 @@ begin grid_fn = cres + "_grid.tile" + n_tile if (strcmp_exact(gtype, "regional") .and. (n_tile .eq. 
7)) then grid_fn = grid_fn + ".halo" + tostring(nhalo_T7) + ".nc" - grid_fn = work_dir + "/fix_sar/" + grid_fn else grid_fn = grid_fn + ".nc" - grid_fn = work_dir + "/grid/" + grid_fn end if + grid_fn = expt_dir + "/fix_sar/" + grid_fn grid_fn_all_tiles(nn) = grid_fn print(" grid_fn_all_tiles(" + nn + ") = " + \ @@ -719,8 +718,8 @@ msg + char_nl + \ ; both poles lie within the tile (i.e. crossing of the IDL by a tile ; boundary is a necessary but not sufficient condition for one or both ; poles to lie within the tile). Below, if the IDL crossing check is -; triggered, we do we do not go further to check whether or not one or -; both poles lie within the tile (because that test is more complex). +; triggered, we do not go further to check whether or not one or both +; poles lie within the tile (because that test is more complex). ; ; ********************************************************************** ; @@ -933,7 +932,7 @@ msg + char_nl + \ ; the north and south poles and the one that straddles the IDL), the in- ; dices of the SW, SE, NE, and NW corners of the tile will not be set ; (they will remain set to their initial missing values), so the check -; below will be triggered. Something more comples needs to be done for +; below will be triggered. Something more complex needs to be done for ; such tiles, but we do not worry about it here for now since we're only ; dealing with regional domains. 
; diff --git a/ush/check_for_preexist_dir.sh b/ush/bash_utils/check_for_preexist_dir.sh similarity index 72% rename from ush/check_for_preexist_dir.sh rename to ush/bash_utils/check_for_preexist_dir.sh index 982d066dde..730b393179 100644 --- a/ush/check_for_preexist_dir.sh +++ b/ush/bash_utils/check_for_preexist_dir.sh @@ -20,16 +20,41 @@ function check_for_preexist_dir() { # #----------------------------------------------------------------------- # +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# # Check arguments. # #----------------------------------------------------------------------- # if [ "$#" -ne 2 ]; then - print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. + + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + Usage: - ${FUNCNAME[0]} dir preexisting_dir_method + ${func_name} dir preexisting_dir_method where the arguments are defined as follows: @@ -40,6 +65,7 @@ where the arguments are defined as follows: String specifying the action to take if a preexisting version of dir is found. Valid values are \"delete\", \"rename\", and \"quit\". 
" + fi # #----------------------------------------------------------------------- @@ -53,6 +79,17 @@ where the arguments are defined as follows: # #----------------------------------------------------------------------- # +# Set the valid values that preexisting_dir_method can take on and check +# to make sure the specified value is valid. +# +#----------------------------------------------------------------------- +# + local valid_vals_preexisting_dir_method=( "delete" "rename" "quit" ) + check_var_valid_value "preexisting_dir_method" \ + "valid_vals_preexisting_dir_method" +# +#----------------------------------------------------------------------- +# # Check if dir already exists. If so, act depending on the value of # preexisting_dir_method. # @@ -60,7 +97,7 @@ where the arguments are defined as follows: # if [ -d "$dir" ]; then - case $preexisting_dir_method in + case ${preexisting_dir_method} in # #----------------------------------------------------------------------- # @@ -94,13 +131,13 @@ where the arguments are defined as follows: old_dir="${dir}_old${old_indx}" done - print_info_msg "\ -Function \"${FUNCNAME[0]}\": Directory already exists: + print_info_msg "$VERBOSE" " +Specified directory (dir) already exists: dir = \"$dir\" Moving (renaming) preexisting directory to: - old_dir = \"$old_dir\"" + old_dir = \"${old_dir}\"" - mv_vrfy "$dir" "$old_dir" + mv_vrfy "$dir" "${old_dir}" ;; # #----------------------------------------------------------------------- @@ -115,26 +152,9 @@ Moving (renaming) preexisting directory to: "quit") print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": Directory already exists: +Specified directory (dir) already exists: dir = \"$dir\"" ;; -# -#----------------------------------------------------------------------- -# -# If preexisting_dir_method is set to a disallowed value, we simply exit -# with a nonzero status. 
Note that "exit" is different than "return" -# because it will cause the calling script (in which this file/function -# is sourced) to stop execution. -# -#----------------------------------------------------------------------- -# - *) - - print_err_msg_exit "\ -Disallowed value for \"preexisting_dir_method\": - preexisting_dir_method = \"$preexisting_dir_method\" -Allowed values are: \"delete\" \"rename\" \"quit\"" - ;; esac @@ -148,6 +168,7 @@ Allowed values are: \"delete\" \"rename\" \"quit\"" #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 + } diff --git a/ush/bash_utils/check_var_valid_value.sh b/ush/bash_utils/check_var_valid_value.sh new file mode 100644 index 0000000000..723460ea14 --- /dev/null +++ b/ush/bash_utils/check_var_valid_value.sh @@ -0,0 +1,135 @@ +# +#----------------------------------------------------------------------- +# +# This function checks whether the specified variable contains a valid +# value (where the set of valid values is also specified). +# +#----------------------------------------------------------------------- +# +function check_var_valid_value() { +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# + { save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). 
+# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Check arguments. +# +#----------------------------------------------------------------------- +# + if [ "$#" -lt 2 ] || [ "$#" -gt 3 ]; then + + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: + + ${func_name} var_name valid_var_values_array_name [msg] + +where the arguments are defined as follows: + + var_name: + The name of the variable whose value we want to check for validity. + + valid_var_values_array_name: + The name of the array containing a list of valid values that var_name + can take on. + + msg + Optional argument specifying the first portion of the error message to + print out if var_name does not have a valid value. +" + + fi +# +#----------------------------------------------------------------------- +# +# Declare local variables. +# +#----------------------------------------------------------------------- +# + local var_name \ + valid_var_values_array_name \ + var_value \ + valid_var_values_at \ + valid_var_values \ + err_msg \ + valid_var_values_str +# +#----------------------------------------------------------------------- +# +# Set local variable values. 
+# +#----------------------------------------------------------------------- +# + var_name="$1" + valid_var_values_array_name="$2" + + var_value=${!var_name} + valid_var_values_at="$valid_var_values_array_name[@]" + valid_var_values=("${!valid_var_values_at}") + + if [ "$#" -eq 3 ]; then + err_msg="$3" + else + err_msg="\ +The value specified in ${var_name} is not supported: + ${var_name} = \"${var_value}\"" + fi +# +#----------------------------------------------------------------------- +# +# Check whether var_value is equal to one of the elements of the array +# valid_var_values. If not, print out an error message and exit the +# calling script. +# +#----------------------------------------------------------------------- +# + is_element_of "valid_var_values" "${var_value}" || { \ + valid_var_values_str=$(printf "\"%s\" " "${valid_var_values[@]}"); + print_err_msg_exit "\ +${err_msg} +${var_name} must be set to one of the following: + ${valid_var_values_str}"; \ + } +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# + { restore_shell_opts; } > /dev/null 2>&1 + +} + diff --git a/ush/bash_utils/count_files.sh b/ush/bash_utils/count_files.sh new file mode 100644 index 0000000000..633bbf7ce4 --- /dev/null +++ b/ush/bash_utils/count_files.sh @@ -0,0 +1,86 @@ +# +#----------------------------------------------------------------------- +# +# This function returns the number of files in the current directory +# that end with the specified extension (file_extension). +# +#----------------------------------------------------------------------- +# +function count_files() { +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. 
+# +#----------------------------------------------------------------------- +# + { save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Check arguments. +# +#----------------------------------------------------------------------- +# + if [ "$#" -ne 1 ]; then + + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: + + ${func_name} file_extension + +where file_extension is the file extension to use for counting files. +The file count returned will be equal to the number of files in the cur- +rent directory that end with \".${file_extension}\". +" + + fi +# +#----------------------------------------------------------------------- +# +# Count the number of files and then print it to stdout. 
+# +#----------------------------------------------------------------------- +# + local file_extension="$1" + local glob_pattern="*.${file_extension}" + local num_files=$( ls -1 ${glob_pattern} 2>/dev/null | wc -l ) + print_info_msg "${num_files}" +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# + { restore_shell_opts; } > /dev/null 2>&1 + +} + diff --git a/ush/filesys_cmds_vrfy.sh b/ush/bash_utils/filesys_cmds_vrfy.sh similarity index 64% rename from ush/filesys_cmds_vrfy.sh rename to ush/bash_utils/filesys_cmds_vrfy.sh index d6988fb7b3..d11ec4720e 100644 --- a/ush/filesys_cmds_vrfy.sh +++ b/ush/bash_utils/filesys_cmds_vrfy.sh @@ -22,19 +22,76 @@ function filesys_cmd_vrfy() { # #----------------------------------------------------------------------- # +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Get information about the script or function that calls this function. +# Note that caller_name will be set as follows: +# +# 1) If the caller is a function, caller_name will be set to the name of +# that function. +# 2) If the caller is a sourced script, caller_name will be set to +# "script". 
Note that a sourced script cannot be the top level +# script since by defintion, it is sourced by another script or func- +# tion. +# 3) If the caller is the top-level script, caller_name will be set to +# "main". +# +# Thus, if caller_name is set to "script" or "main", the caller is a +# script, and if it is set to anything else, the caller is a function. +# +# Below, the index into FUNCNAME and BASH_SOURCE is 2 (not 1 as is usu- +# ally the case) because this function is called by functions such as +# cp_vrfy, mv_vrfy, rm_vrfy, ln_vrfy, mkdir_vrfy, and cd_vrfy, but these +# are just wrappers, and in the error and informational messages, we are +# really interested in the scripts/functions that in turn call these +# wrappers. +# +#----------------------------------------------------------------------- +# + local caller_fp=$( readlink -f "${BASH_SOURCE[2]}" ) + local caller_fn=$( basename "${caller_fp}" ) + local caller_dir=$( dirname "${caller_fp}" ) + local caller_name="${FUNCNAME[2]}" +# +#----------------------------------------------------------------------- +# # Check that at least one argument is supplied. # #----------------------------------------------------------------------- # if [ "$#" -lt 1 ]; then - print_err_msg_exit "\ -From function \"${FUNCNAME[0]}\": At least one argument must be specified: - number of arguments = \$# = $# -Usage is: - ${FUNCNAME[0]} cmd args_to_cmd -where \"cmd\" is the command to execute and \"args_to_cmd\" are the options and -arguments to pass to that command." + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: + + ${func_name} cmd [args_to_cmd] + +where \"cmd\" is the name of the command to execute and \"args_to_cmd\" +are zero or more options and arguments to pass to that command. +" fi # @@ -43,8 +100,8 @@ arguments to pass to that command." 
# The first argument to this function is the command to execute while # the remaining ones are the arguments to that command. Extract the # command and save it in the variable "cmd". Then shift the argument -# list so that $@ contains the arguments to the command but not the com- -# mand itself. +# list so that $@ contains the arguments to the command but not the +# name of the command itself. # #----------------------------------------------------------------------- # @@ -83,10 +140,24 @@ arguments to pass to that command." # #----------------------------------------------------------------------- # - if [ $exit_code -ne 0 ]; then + if [ "${caller_name}" = "main" ] || \ + [ "${caller_name}" = "script" ]; then + script_or_function="the script" + else + script_or_function="function \"${caller_name}\"" + fi + + if [ ${exit_code} -ne 0 ]; then + print_err_msg_exit "\ -From function \"${FUNCNAME[0]}\": \"$cmd\" operation failed: +Call to function \"${cmd}_vrfy\" failed. This function was called from +${script_or_function} in file: + + \"${caller_fp}\" + +Error message from \"${cmd}_vrfy\" function's \"$cmd\" operation: $output" + fi # #----------------------------------------------------------------------- @@ -112,9 +183,16 @@ $output" #----------------------------------------------------------------------- # if [ -n "$output" ]; then - print_info_msg "\ -From function \"${FUNCNAME[0]}\": Message from \"$cmd\" operation: + + print_info_msg " +\"${cmd}_vrfy\" operation returned with a message. 
This command was +issued from ${script_or_function} in file: + + \"${caller_fp}\" + +Message from \"${cmd}_vrfy\" function's \"$cmd\" operation: $output" + fi # #----------------------------------------------------------------------- @@ -125,6 +203,7 @@ $output" #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 + } diff --git a/ush/bash_utils/get_elem_inds.sh b/ush/bash_utils/get_elem_inds.sh new file mode 100644 index 0000000000..70da5b248e --- /dev/null +++ b/ush/bash_utils/get_elem_inds.sh @@ -0,0 +1,180 @@ +# +#----------------------------------------------------------------------- +# +# For a description of this function, see the usage message below. +# +#----------------------------------------------------------------------- +# +function get_elem_inds() { +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# + { save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Check arguments. 
+# +#----------------------------------------------------------------------- +# + if [ "$#" -ne 2 ] && [ "$#" -ne 3 ]; then + + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: + + ${func_name} array_name str_to_match [inds_to_return] + +This function prints to stdout the indices of those elements of a given +array that match (i.e. are equal to) a given string. It can return the +index of the first matched element, the index of the last matched ele- +ment, or the indices of all matched elements. The return code +from this function will be zero if at least one match is found and non- +zero if no matches are found. + +The arguments to this function are defined as follows: + + array_name: + The name of the array in which to search for str_to_match. Note that + this is the name of the array, not the array itself. + + str_to_match: + The string to match in array_name. + + inds_to_return: + Optional argument that specifies the subset of the indices of the ar- + ray elements that match str_to_match to print to stdout. Must be set + to \"first\", \"last\", or \"all\" (but is case insensitive). If set to + \"first\", the index of only the first matched element is printed. If + set to \"last\", the index of only the last matched element is printed. + If set to \"all\", the indices of all matched elements are printed. De- + fault is \"all\". +" + + fi +# +#----------------------------------------------------------------------- +# +# Declare local variables. +# +#----------------------------------------------------------------------- +# + local array_name \ + str_to_match \ + inds_to_return \ + array_name_at \ + array \ + valid_vals_inds_to_return \ + match_inds \ + num_matches \ + num_elems \ + n +# +#----------------------------------------------------------------------- +# +# Set local variables to appropriate input arguments. 
+# +#----------------------------------------------------------------------- +# + array_name="$1" + str_to_match="$2" + + inds_to_return="all" + if [ "$#" -eq 3 ]; then + inds_to_return="$3" + fi + + array_name_at="$array_name[@]" + array=("${!array_name_at}") +# +#----------------------------------------------------------------------- +# +# Change all letters in inds_to_return to lower case. Then check whe- +# ther it has a valid value. +# +#----------------------------------------------------------------------- +# + inds_to_return="${inds_to_return,,}" + valid_vals_inds_to_return=( "first" "last" "all" ) + check_var_valid_value "inds_to_return" "valid_vals_inds_to_return" +# +#----------------------------------------------------------------------- +# +# Initialize the array match_inds to an empty array. This will contain +# the indices of any matched elements. Then loop through the elements +# of the given array and check whether each element is equal to str_to_- +# match. If so, save the index of that element as an element of match_- +# inds. If inds_to_return is set to "first", we break out of the loop +# after finding the first match in order to not waste computation. +# +#----------------------------------------------------------------------- +# + match_inds=() + num_matches=0 + + num_elems=${#array[@]} + for (( n=0; n<${num_elems}; n++ )); do + if [ "${array[$n]}" = "${str_to_match}" ]; then + match_inds[${num_matches}]=$n + num_matches=$((num_matches+1)) + if [ "${inds_to_return}" = "first" ]; then + break + fi + fi + done +# +#----------------------------------------------------------------------- +# +# Find the number of matches. If it is more than zero, print the indi- +# ces of the matched elements to stdout. 
+# +#----------------------------------------------------------------------- +# + num_matches=${#match_inds[@]} + if [ ${num_matches} -gt 0 ]; then + if [ "${inds_to_return}" = "last" ]; then + printf "%s\n" "${match_inds[-1]}" + else + printf "%s\n" "${match_inds[@]}" + fi + fi +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# + { restore_shell_opts; } > /dev/null 2>&1 + +} diff --git a/ush/bash_utils/get_manage_externals_config_property.sh b/ush/bash_utils/get_manage_externals_config_property.sh new file mode 100644 index 0000000000..b42605ec9e --- /dev/null +++ b/ush/bash_utils/get_manage_externals_config_property.sh @@ -0,0 +1,242 @@ +# +#----------------------------------------------------------------------- +# +# For a description of this function, see the usage message below. +# +#----------------------------------------------------------------------- +# +function get_manage_externals_config_property() { +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# + { save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). 
+# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Check arguments. +# +#----------------------------------------------------------------------- +# + if [ "$#" -ne 3 ]; then + + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: + + ${func_name} externals_cfg_fp external_name property_name + +This function searches a specified manage_externals configuration file +and extracts from it the value of the specified property of the external +with the specified name (e.g. the relative path in which the external +has been/will be cloned by the manage_externals utility). + +The arguments to this function are defined as follows: + + externals_cfg_fp: + The absolute or relative path to the manage_externals configuration + file that will be searched. + + external_name: + The name of the external to search for in the manage_externals confi- + guration file specified by externals_cfg_fp. + + property_name: + The name of the property whose value to obtain (for the external spe- + cified by external_name). +" + + fi +# +#----------------------------------------------------------------------- +# +# Declare local variables. 
+# +#----------------------------------------------------------------------- +# + local externals_cfg_fp \ + external_name \ + property_name \ + regex_search \ + line \ + property_value +# +#----------------------------------------------------------------------- +# +# Set the name of the manage_externals configuration file [which may be +# the absolute path to the file or a relative path (relative to the cur- +# rent working directory)], the name of the external in that file whose +# property value we want to extract, and the name of the property under +# that external. +# +#----------------------------------------------------------------------- +# + externals_cfg_fp="$1" + external_name="$2" + property_name="$3" +# +#----------------------------------------------------------------------- +# +# Check that the specified manage_externals configuration file exists. +# If not, print out an error message and exit. +# +#----------------------------------------------------------------------- +# + if [ ! -f "${externals_cfg_fp}" ]; then + print_err_msg_exit "\ +The specified manage_externals configuration file (externals_cfg_fp) +does not exist: + externals_cfg_fp = \"${externals_cfg_fp}\"" + fi +# +#----------------------------------------------------------------------- +# +# Use "sed" to extract the line in the configuration file containing the +# value of the specified property for the specified external (if such a +# line exists). To explain how the "sed" command below does this, we +# first number the lines in that command, as follows: +# +# (1) line=$( sed -r -n \ +# (2) -e "/^[ ]*\[${external_name}\]/!b" \ +# (3) -e ":SearchForLine" \ +# (4) -e "s/(${regex_search})/\1/;t FoundLine" \ +# (5) -e "n;bSearchForLine" \ +# (6) -e ":FoundLine" \ +# (7) -e "p;q" \ +# (8) "${externals_cfg_fp}" \ +# (9) ) +# +# This command works as follows. 
First, on line (1), the -r flag speci-
+# fies that extended regular expressions should be allowed, and the -n
+# flag suppresses the printing of each line in the file that sed pro-
+# cesses.
+#
+# Line (2) checks for all lines in the file [which is specified on line
+# (8)] that do NOT start with zero or more spaces followed by the exter-
+# nal name in square brackets. (The ! before the "b" causes the nega-
+# tion.) For each such line, the "b" causes the rest of the sed com-
+# mands [specified by the arguments to the "-e" flags on lines (3)
+# through (7)] to be skipped and for sed to read in the next line in the
+# file. Note that if no line is found that starts with zero or more
+# spaces followed by the external name in square brackets, sed will
+# reach the end of the file and quit [and lines (3) through (6) will ne-
+# ver be executed], and the variable "line" will get assigned to a null
+# string.
+#
+# Lines (3) through (5) form a while-loop. After finding a line in the
+# file that does start with zero or more spaces followed by the external
+# name in square brackets, we pass through line (3) (which just defines
+# the location of the SearchForLine label; it does not execute any com-
+# mands) and execute line (4). This line checks whether the current
+# line in the file has the form specified by the regular expression in
+# regex_search but doesn't change the line (since the whole line is
+# substituted back in via the parentheses around ${regex_search} and the
+# \1). If not, sed moves on to line (5), which clears the contents of
+# the pattern buffer and reads the next line in the file into it (be-
+# cause of the "n" command). Execution then moves back to line 3 (be-
+# cause of the "bSearchForLine" command). 
If the current line in the
+# file does have the form specified by regex_search, line (5) places the
+# current line in the file in the pattern buffer (without modifying
+# it), and execution moves on to line (6) [because a substitution was
+# successfully made on line 4, so the "t FoundLine" command moves the
+# execution to line (6)]. Thus, once line (1) finds the start of the
+# section for the specified external, lines (3) through (6) loop until a
+# line defining the specified property is found (or the end of the file
+# is reached). If and when this happens, sed execution moves to line
+# (6).
+#
+# Line (6) just defines the location of the FoundLine label, so it
+# doesn't actually execute any commands, and execution moves to line
+# (7). On this line, the "p" command prints out the contents of the
+# pattern buffer, which is the first line in the file after the start
+# of the specified external that defines the property. Then the "q"
+# command simply quits execution since we have found the line we are
+# looking for.
+#
+#-----------------------------------------------------------------------
+#
+  regex_search="^[ ]*(${property_name})[ ]*=[ ]*([^ ]*).*"
+  line=$( sed -r -n \
+          -e "/^[ ]*\[${external_name}\]/!b" \
+          -e ":SearchForLine" \
+          -e "s/(${regex_search})/\1/;t FoundLine" \
+          -e "n;bSearchForLine" \
+          -e ":FoundLine" \
+          -e "p;q" \
+          "${externals_cfg_fp}" \
+        )
+#
+#-----------------------------------------------------------------------
+#
+# If the variable "line" is empty, it means the sed command above was
+# not able to find a line in the configuration file that defines the
+# specified property for the specified external. In this case, print
+# out an error message and exit. 
+#
+#-----------------------------------------------------------------------
+#
+  if [ -z "${line}" ]; then
+
+    print_err_msg_exit "\
+In the specified manage_externals configuration file (externals_cfg_fp),
+the specified property (property_name) was not found for the speci-
+fied external (external_name):
+  externals_cfg_fp = \"${externals_cfg_fp}\"
+  external_name = \"${external_name}\"
+  property_name = \"${property_name}\""
+#
+#-----------------------------------------------------------------------
+#
+# If line is not empty, it means the sed command above was able to find
+# a line in the configuration file that defines the specified property
+# for the specified external. In this case, extract the property value
+# from line and print it to stdout.
+#
+#-----------------------------------------------------------------------
+#
+  else
+
+    property_value=$( printf "%s" "${line}" | \
+                      sed -r -n -e "s/${regex_search}/\2/p" )
+    printf "%s\n" "${property_value}"
+
+  fi
+#
+#-----------------------------------------------------------------------
+#
+# Restore the shell options saved at the beginning of this script/func-
+# tion. 
+# +#----------------------------------------------------------------------- +# + { restore_shell_opts; } > /dev/null 2>&1 + +} diff --git a/ush/interpol_to_arbit_CRES.sh b/ush/bash_utils/interpol_to_arbit_CRES.sh old mode 100755 new mode 100644 similarity index 79% rename from ush/interpol_to_arbit_CRES.sh rename to ush/bash_utils/interpol_to_arbit_CRES.sh index d6d4e6ac9d..6f685c1a54 --- a/ush/interpol_to_arbit_CRES.sh +++ b/ush/bash_utils/interpol_to_arbit_CRES.sh @@ -8,7 +8,7 @@ # #----------------------------------------------------------------------- # -function interpol_to_arbit_CRES () { +function interpol_to_arbit_CRES() { # #----------------------------------------------------------------------- # @@ -21,17 +21,41 @@ function interpol_to_arbit_CRES () { # #----------------------------------------------------------------------- # +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# # Check arguments. # #----------------------------------------------------------------------- # if [ "$#" -ne 3 ]; then - print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. 
+ print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + Usage: - ${FUNCNAME[0]} RES RES_array prop_array + ${func_name} RES RES_array prop_array where the arguments are defined as follows: @@ -48,7 +72,6 @@ where the arguments are defined as follows: prop_array: The name of the array containing the values of the property corres- ponding to the cubed-sphere resolutions in RES_array. - " fi @@ -97,7 +120,7 @@ where the arguments are defined as follows: else - for ((i=0; i < ${num_valid_RESes}-1 ; i++)); do + for (( i=0; i<$((num_valid_RESes-1)); i++ )); do if [ "$RES" -gt "${RES_array[$i]}" ] && \ [ "$RES" -le "${RES_array[$i+1]}" ]; then @@ -129,4 +152,3 @@ where the arguments are defined as follows: } - diff --git a/ush/is_array.sh b/ush/bash_utils/is_array.sh similarity index 68% rename from ush/is_array.sh rename to ush/bash_utils/is_array.sh index ed9ecc2521..16be2fbb7a 100644 --- a/ush/is_array.sh +++ b/ush/bash_utils/is_array.sh @@ -12,7 +12,7 @@ # #----------------------------------------------------------------------- # -function is_array () { +function is_array() { # #----------------------------------------------------------------------- # @@ -25,23 +25,46 @@ function is_array () { # #----------------------------------------------------------------------- # +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. 
+# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# # Check arguments. # #----------------------------------------------------------------------- # - func_name="${FUNCNAME[0]}" - if [ "$#" -ne 1 ]; then - print_err_msg_exit "\ -Function \"${func_name}\": Incorrect number of arguments specified. + + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + Usage: - ${func_name} var_name + ${func_name} var_name where var_name is the name of the variable to check to determine whether or not it is an array. - " + fi # #----------------------------------------------------------------------- @@ -67,10 +90,11 @@ or not it is an array. # #----------------------------------------------------------------------- # -# Return the variable "contains". +# Return the variable "is_an_array". # #----------------------------------------------------------------------- # return ${is_an_array} + } diff --git a/ush/bash_utils/is_element_of.sh b/ush/bash_utils/is_element_of.sh new file mode 100644 index 0000000000..bd92c6f3fd --- /dev/null +++ b/ush/bash_utils/is_element_of.sh @@ -0,0 +1,153 @@ +# +#----------------------------------------------------------------------- +# +# For a description of this function, see the usage message below. +# +#----------------------------------------------------------------------- +# +function is_element_of() { +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. 
+# +#----------------------------------------------------------------------- +# + { save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Check arguments. +# +#----------------------------------------------------------------------- +# + if [ "$#" -ne 2 ]; then + + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: + + ${func_name} array_name str_to_match + +This function checks whether the specified array contains the specified +string, i.e. whether at least one of the elements of the array is equal +to the string. The return code from this function will be zero if at +least one match is found and nonzero if no matches are found. + +The arguments to this function are defined as follows: + + array_name: + The name of the array in which to search for str_to_match. Note that + this is the name of the array, not the array itself. + + str_to_match: + The string to search for in array_name. + +Use this function in a script as follows: + + . ./is_element_of.sh + array_name=("1" "2" "3 4" "5") + + str_to_match="2" + is_element_of "${str_to_match}" array_name + echo $? 
# Should output 0. + + str_to_match="3 4" + is_element_of "${str_to_match}" array_name + echo $? # Should output 0. + + str_to_match="6" + is_element_of "${str_to_match}" array_name + echo $? # Should output 1. +" + + fi +# +#----------------------------------------------------------------------- +# +# Declare local variables. +# +#----------------------------------------------------------------------- +# + local array_name \ + str_to_match \ + array_name_at \ + array \ + found_match \ + num_elems \ + n +# +#----------------------------------------------------------------------- +# +# Set local variables to appropriate input arguments. +# +#----------------------------------------------------------------------- +# + array_name="$1" + str_to_match="$2" + + array_name_at="$array_name[@]" + array=("${!array_name_at}") +# +#----------------------------------------------------------------------- +# +# Initialize the return variable found_match to 1 (false). Then loop +# through the elements of the array and check whether each element is +# equal to str_to_match. Once a match is found, reset found_match to 0 +# (true) and break out of the loop. +# +#----------------------------------------------------------------------- +# + found_match=1 + num_elems=${#array[@]} + for (( n=0; n<${num_elems}; n++ )); do + if [ "${array[$n]}" = "${str_to_match}" ]; then + found_match=0 + break + fi + done +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# + { restore_shell_opts; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Return the variable found_match. 
+# +#----------------------------------------------------------------------- +# + return ${found_match} + +} + diff --git a/ush/bash_utils/print_input_args.sh b/ush/bash_utils/print_input_args.sh new file mode 100644 index 0000000000..16494a8ae7 --- /dev/null +++ b/ush/bash_utils/print_input_args.sh @@ -0,0 +1,171 @@ +# +#----------------------------------------------------------------------- +# +# This file defines a function that prints to stdout the names and val- +# ues of a specified list of variables that are the valid arguments to +# the script or function that calls this function. It is mainly used +# for debugging to check that the argument values passed to the calling +# script/function have been set correctly. Note that if a global varia- +# ble named VERBOSE is not defined, the message will be printed out. If +# a global variable named VERBOSE is defined, then the message will be +# printed out only if VERBOSE is set to TRUE. +# +#----------------------------------------------------------------------- +# +function print_input_args() { +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# + { save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. 
+# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Get information about the script or function that calls this function. +# Note that caller_name will be set as follows: +# +# 1) If the caller is a function, caller_name will be set to the name of +# that function. +# 2) If the caller is a sourced script, caller_name will be set to +# "script". Note that a sourced script cannot be the top level +# script since by defintion, it is sourced by another script or func- +# tion. +# 3) If the caller is the top-level script, caller_name will be set to +# "main". +# +# Thus, if caller_name is set to "script" or "main", the caller is a +# script, and if it is set to anything else, the caller is a function. +# +#----------------------------------------------------------------------- +# + local caller_fp=$( readlink -f "${BASH_SOURCE[1]}" ) + local caller_fn=$( basename "${caller_fp}" ) + local caller_dir=$( dirname "${caller_fp}" ) + local caller_name="${FUNCNAME[1]}" +# +#----------------------------------------------------------------------- +# +# Check arguments. +# +#----------------------------------------------------------------------- +# + if [ "$#" -ne 1 ]; then + + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: + + ${func_name} array_name_valid_caller_args + +where array_name_valid_caller_args is the name of the array containing +the names of valid arguments that can be passed to the calling script or +function. +" + + fi +# +#----------------------------------------------------------------------- +# +# Declare local variables. 
+# +#----------------------------------------------------------------------- +# + local array_name_valid_caller_args \ + valid_caller_args \ + script_or_function \ + msg \ + num_valid_args \ + i \ + line +# +#----------------------------------------------------------------------- +# +# Set the array containing the names of the arguments that can be passed +# to the calling script/function. +# +#----------------------------------------------------------------------- +# + array_name_valid_caller_args="$1" + valid_caller_args="${array_name_valid_caller_args}[@]" + valid_caller_args=("${!valid_caller_args}") +# +#----------------------------------------------------------------------- +# +# Set the message to print to stdout. +# +#----------------------------------------------------------------------- +# + if [ "${caller_name}" = "main" ] || \ + [ "${caller_name}" = "script" ]; then + script_or_function="the script" + else + script_or_function="function \"${caller_name}\"" + fi + + msg=" +The arguments to ${script_or_function} in file + + \"${caller_fp}\" + +have been set as follows: +" + + num_valid_caller_args="${#valid_caller_args[@]}" + for (( i=0; i<${num_valid_caller_args}; i++ )); do + line=$( declare -p "${valid_caller_args[$i]}" ) + msg=$( printf "%s\n%s" "$msg" " $line" ) + done +# +#----------------------------------------------------------------------- +# +# If a global variable named VERBOSE is not defined, print out the mes- +# sage. If it is defined, print out the message only if VERBOSE is set +# to TRUE. +# +#----------------------------------------------------------------------- +# + if [ ! -v VERBOSE ]; then + print_info_msg "$msg" + else + print_info_msg "$VERBOSE" "$msg" + fi +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. 
+# +#----------------------------------------------------------------------- +# + { restore_shell_opts; } > /dev/null 2>&1 + +} + diff --git a/ush/bash_utils/print_msg.sh b/ush/bash_utils/print_msg.sh new file mode 100644 index 0000000000..544357f628 --- /dev/null +++ b/ush/bash_utils/print_msg.sh @@ -0,0 +1,320 @@ +# +#----------------------------------------------------------------------- +# +# This file defines functions used in printing formatted output to std- +# out (e.g. informational and error messages). +# +#----------------------------------------------------------------------- +# + +# +#----------------------------------------------------------------------- +# +# Function to print informational messages using printf. +# +#----------------------------------------------------------------------- +# +function print_info_msg() { +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# + { save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. 
+# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Get information about the script or function that calls this function. +# Note that caller_name will be set as follows: +# +# 1) If the caller is a function, caller_name will be set to the name of +# that function. +# 2) If the caller is a sourced script, caller_name will be set to +# "script". Note that a sourced script cannot be the top level +# script since by defintion, it is sourced by another script or func- +# tion. +# 3) If the caller is the top-level script, caller_name will be set to +# "main". +# +# Thus, if caller_name is set to "script" or "main", the caller is a +# script, and if it is set to anything else, the caller is a function. +# +#----------------------------------------------------------------------- +# + local caller_fp=$( readlink -f "${BASH_SOURCE[1]}" ) + local caller_fn=$( basename "${caller_fp}" ) + local caller_dir=$( dirname "${caller_fp}" ) + local caller_name="${FUNCNAME[1]}" +# +#----------------------------------------------------------------------- +# +# Declare local variables. +# +#----------------------------------------------------------------------- +# + local verbose \ + info_msg +# +#----------------------------------------------------------------------- +# +# If one argument is supplied, we assume it is the message to print out. +# between informational lines that are always printed. +# +#----------------------------------------------------------------------- +# + if [ "$#" -eq 1 ]; then + + verbose="TRUE" + info_msg="$1" + + elif [ "$#" -eq 2 ]; then + + verbose="$1" + info_msg="$2" +# +#----------------------------------------------------------------------- +# +# If no arguments or more than two arguments are supplied, print out a +# usage message and exit. 
+# +#----------------------------------------------------------------------- +# + else + + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: + + ${func_name} [verbose] info_msg + +where the arguments are defined as follows: + + verbose: + This is an optional argument. If set to \"TRUE\", info_msg will be + printed to stdout. Otherwise, info_msg will not be printed. + + info_msg: + This is the informational message to print to stdout. + +This function prints an informational message to stdout. If one argu- +ment is passed in, then that argument is assumed to be info_msg and is +printed. If two arguments are passed in, then the first is assumed to +be verbose and the second info_msg. In this case, info_msg gets printed +only if verbose is set to \"TRUE\". +" + + fi +# +#----------------------------------------------------------------------- +# +# If verbose is set to "TRUE", print out the message. +# +#----------------------------------------------------------------------- +# + if [ "$verbose" = "TRUE" ]; then + printf "%s\n" "${info_msg}" + fi +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# + { restore_shell_opts; } > /dev/null 2>&1 +} + + + + +# +#----------------------------------------------------------------------- +# +# Function to print out an error message to stderr using printf and then +# exit. +# +#----------------------------------------------------------------------- +# +function print_err_msg_exit() { +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. 
+# +#----------------------------------------------------------------------- +# + { save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Get information about the script or function that calls this function. +# Note that caller_name will be set as follows: +# +# 1) If the caller is a function, caller_name will be set to the name of +# that function. +# 2) If the caller is a sourced script, caller_name will be set to +# "script". Note that a sourced script cannot be the top level +# script since by defintion, it is sourced by another script or func- +# tion. +# 3) If the caller is the top-level script, caller_name will be set to +# "main". +# +# Thus, if caller_name is set to "script" or "main", the caller is a +# script, and if it is set to anything else, the caller is a function. +# +#----------------------------------------------------------------------- +# + local caller_fp=$( readlink -f "${BASH_SOURCE[1]}" ) + local caller_fn=$( basename "${caller_fp}" ) + local caller_dir=$( dirname "${caller_fp}" ) + local caller_name="${FUNCNAME[1]}" +# +#----------------------------------------------------------------------- +# +# Declare local variables. 
+# +#----------------------------------------------------------------------- +# + local msg_header \ + msg_footer \ + err_msg +# +#----------------------------------------------------------------------- +# +# Set the message header and footer. +# +#----------------------------------------------------------------------- +# + if [ "${caller_name}" = "main" ] || \ + [ "${caller_name}" = "script" ]; then + + msg_header=$( printf "\n\ +ERROR: + From script: \"${caller_fn}\" + Full path to script: \"${caller_fp}\" +" + ) + + else + + msg_header=$( printf "\n\ +ERROR: + From function: \"${caller_name}\" + In file: \"${caller_fn}\" + Full path to file: \"${caller_fp}\" +" + ) + + fi + + msg_footer=$( printf "\nExiting with nonzero status." ) +# +#----------------------------------------------------------------------- +# +# Check number of arguments and, if necessary, print out a usage message +# and exit. +# +#----------------------------------------------------------------------- +# + if [ "$#" -gt 1 ]; then + + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: + + ${func_name} err_msg + +where err_msg is an optional error message to print to stderr. Note +that a header and a footer are always added to err_msg. Thus, if err_- +msg is not specified, the message that is printed will consist of only +the header and footer. +" +# +#----------------------------------------------------------------------- +# +# If an argument is listed, set err_msg to that argument. Otherwise, +# set it to a null string. Then print out the complete error message to +# stderr and exit. 
+# +#----------------------------------------------------------------------- +# + else + + if [ "$#" -eq 0 ]; then + err_msg="" + elif [ "$#" -eq 1 ]; then + err_msg="\n$1" + fi + + printf "${msg_header}${err_msg}${msg_footer}\n" 1>&2 + exit 1 + + fi +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. This statement will not be reached due to the preceeding exit +# statement, but we include it here for completeness (i.e. there should +# be a call to restore_shell_opts that matches a preceeding call to +# save_shell_opts). +# +#----------------------------------------------------------------------- +# + { restore_shell_opts; } > /dev/null 2>&1 + +} + diff --git a/ush/bash_utils/process_args.sh b/ush/bash_utils/process_args.sh new file mode 100644 index 0000000000..4473b5aba4 --- /dev/null +++ b/ush/bash_utils/process_args.sh @@ -0,0 +1,306 @@ +# +#----------------------------------------------------------------------- +# +# This function processes a list of variable name and value pairs passed +# to it as a set of arguments, starting with the second argument. We +# refer to these pairs as argument-value pairs (or "arg-val" pairs for +# short) because the variable names in these pairs represent the names +# of arguments to the script or function that calls this function (which +# we refer to here as the "caller"). The first argument to this func- +# tion being the name of an array that contains a list of valid argument +# names that the caller is allowed to accept. Each arg-val pair must +# have the form +# +# ARG_NAME=VAR_VALUE +# +# where ARG_NAME is the name of an argument and VAR_VALUE is the value +# to set that argument to. For each arg-val pair, this function creates +# a global variable named ARG_NAME and assigns to it the value VAR_VAL- +# UE. 
+# +# The purpose of this function is to provide a mechanism by which a pa- +# rent script, say parent.sh, can pass variable values to a child script +# or function, say child.sh, that makes it very clear which arguments of +# child.sh are being set and to what values. For example, parent.sh can +# call child.sh as follows: +# +# ... +# child.sh arg3="Hello" arg2="bye" arg4=("this" "is" "an" "array") +# ... +# +# Then child.sh can use this function (process_args) as follows to pro- +# cess the arg-val pairs passed to it: +# +# ... +# valid_args=( "arg1" "arg2" "arg3" "arg4" ) +# process_args valid_args "$@" +# ... +# +# Here, valid_args is an array that defines or "declares" the argument +# list for child.sh, i.e. it defines the variable names that child.sh is +# allowed to accept as arguments. Its name is passed to process_args as +# the first argument. The "$@" appearing in the call to process_args +# passes to process_args the list of arg-val pairs that parent.sh passes +# to child.sh as the second through N-th arguments. In the example +# above, "$@" represents: +# +# arg3="Hello" arg2="bye" arg4=("this" "is" "an" "array") +# +# After the call to process_args in child.sh, the variables arg1, arg2, +# arg3, and arg4 will be set as follows in child.sh: +# +# arg1="" +# arg2="bye" +# arg3="Hello" +# arg4=("this" "is" "an" "array") +# +# Note that: +# +# 1) The set of arg-val pairs may list only a subset of the list of arg- +# uments declared in valid_args; the unlisted arguments will be set +# to the null string. In the example above, arg1 is set to the null +# string because it is not specified in any of the arg-val pairs in +# the call to child.sh in parent.sh. +# +# 2) The arg-val pairs in the call to child.sh do not have to be in the +# same order as the list of "declared" arguments in child.sh. For +# instance, in the example above, the arg-val pair for arg3 is listed +# before the one for arg2. 
+# +# 3) An argument can be set to an array by starting and ending the value +# portion of its arg-val pair with opening and closing parentheses, +# repsectively, and listing the elements within (each one in a set of +# double-quotes and separated fromt the next by whitespace). In the +# example above, this is done for arg4. +# +# 4) If the value portion of an arg-val pair contains an argument that +# is not defined in the array valid_args in child.sh, the call to +# process_args in child.sh will result in an error message and exit +# from the caller. +# +#----------------------------------------------------------------------- +# +function process_args() { +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# + { save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Check arguments. 
+# +#----------------------------------------------------------------------- +# + if [ "$#" -lt 1 ]; then + + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: + + ${func_name} array_name_valid_arg_names \ + arg_val_pair1 \ + ... \ + arg_val_pairN + +where the arguments are defined as follows: + + array_name_valid_arg_names: + The name of the array containing a list of valid argument names. + + arg_val_pair1 ... arg_val_pairN: + A list of N argument-value pairs. These have the form + + arg1=\"val1\" ... argN=\"valN\" + + where each argument name (argI) needs to be in the list of valid argu- + ment names specified in array_name_valid_arg_names. Note that not all + the valid arguments listed in array_name_valid_arg_names need to be + set, and the argument-value pairs can be in any order, i.e. they don't + have to follow the order of arguments listed in valid_arg_names_ar- + ray_name. +" + + fi +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# + local array_name_valid_arg_names \ + valid_arg_names_at \ + valid_arg_names \ + num_valid_args \ + num_arg_val_pairs \ + i valid_arg_name arg_already_specified \ + arg_val_pair arg_name arg_value is_array \ + err_msg cmd_line + + array_name_valid_arg_names="$1" + valid_arg_names_at="${array_name_valid_arg_names}[@]" + valid_arg_names=("${!valid_arg_names_at}") + num_valid_args=${#valid_arg_names[@]} +# +#----------------------------------------------------------------------- +# +# Get the number of argument-value pairs (or arg-val pairs, for short) +# being passed into this function. These consist of all arguments +# starting with the 2nd, so we subtract 1 from the total number of argu- +# ments. 
+# +#----------------------------------------------------------------------- +# + num_arg_val_pairs=$(( $# - 1 )) +# +#----------------------------------------------------------------------- +# +# Make sure that the number of arg-val pairs is less than or equal to +# the number of valid arguments. +# +#----------------------------------------------------------------------- +# + if [ "${num_arg_val_pairs}" -gt "${num_valid_args}" ]; then + valid_arg_names_str=$(printf "\"%s\" " "${valid_arg_names[@]}"); + print_err_msg_exit "\ +The number of argument-value pairs specified on the command line (num_- +arg_val_pairs) must be less than or equal to the number of valid argu- +ments (num_valid_args) specified in the array valid_arg_names: + num_arg_val_pairs = ${num_arg_val_pairs} + num_valid_args = ${num_valid_args} + valid_arg_names = ( ${valid_arg_names_str})" + fi +# +#----------------------------------------------------------------------- +# +# Initialize all valid arguments to the null string. Note that the +# scope of this initialization is global, i.e. the calling script or +# function will be aware of these initializations. Also, initialize +# each element of the array arg_already_specified to "false". This ar- +# ray keeps track of whether each valid argument has already been set +# to a value by an arg-val specification. +# +#----------------------------------------------------------------------- +# + for (( i=0; i<${num_valid_args}; i++ )); do + valid_arg_name="${valid_arg_names[$i]}" + eval ${valid_arg_name}="" + arg_already_specified[$i]="false" + done +# +#----------------------------------------------------------------------- +# +# Loop over the list of arg-val pairs and set argument values. 
+# +#----------------------------------------------------------------------- +# + for arg_val_pair in "${@:2}"; do + + arg_name=$(echo ${arg_val_pair} | cut -f1 -d=) + arg_value=$(echo ${arg_val_pair} | cut -f2 -d=) +# +# If the first character of the argument's value is an opening parenthe- +# sis and its last character is a closing parenthesis, then the argument +# is an array. Check for this and set the is_array flag accordingly. +# + is_array="false" + if [ "${arg_value:0:1}" = "(" ] && \ + [ "${arg_value: -1}" = ")" ]; then + is_array="true" + fi +# +#----------------------------------------------------------------------- +# +# Make sure that the argument name specified by the current argument- +# value pair is valid. +# +#----------------------------------------------------------------------- +# + err_msg="\ +The specified argument name (arg_name) in the current argument-value +pair (arg_val_pair) is not valid: + arg_val_pair = \"${arg_val_pair}\" + arg_name = \"${arg_name}\"" + check_var_valid_value "arg_name" "valid_arg_names" "${err_msg}" +# +#----------------------------------------------------------------------- +# +# Loop through the list of valid argument names and find the one that +# the current arg-val pair corresponds to. Then set that argument to +# the specified value. +# +#----------------------------------------------------------------------- +# + for (( i=0; i<${num_valid_args}; i++ )); do + + valid_arg_name="${valid_arg_names[$i]}" + if [ "${arg_name}" = "${valid_arg_name}" ]; then +# +# Check whether the current argument has already been set by a previous +# arg-val pair on the command line. If not, proceed to set the argument +# to the specified value. If so, print out an error message and exit +# the calling script. 
+# + if [ "${arg_already_specified[$i]}" = "false" ]; then + arg_already_specified[$i]="true" + if [ "${is_array}" = "true" ]; then + eval ${arg_name}=${arg_value} + else + eval ${arg_name}=\"${arg_value}\" + fi + else + cmd_line=$( printf "\'%s\' " "${@:1}" ) + print_err_msg_exit "\ +The current argument has already been assigned a value on the command +line: + arg_name = \"${arg_name}\" + cmd_line = ${cmd_line} +Please assign values to arguments only once on the command line." + fi + fi + + done + + done +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# + { restore_shell_opts; } > /dev/null 2>&1 + +} + diff --git a/ush/save_restore_shell_opts.sh b/ush/bash_utils/save_restore_shell_opts.sh similarity index 100% rename from ush/save_restore_shell_opts.sh rename to ush/bash_utils/save_restore_shell_opts.sh diff --git a/ush/bash_utils/set_bash_param.sh b/ush/bash_utils/set_bash_param.sh new file mode 100644 index 0000000000..12c2e49be4 --- /dev/null +++ b/ush/bash_utils/set_bash_param.sh @@ -0,0 +1,144 @@ +# +#----------------------------------------------------------------------- +# +# This file defines a function that replaces placeholder values of vari- +# ables in several different types of files with actual values. +# +#----------------------------------------------------------------------- +# +function set_bash_param() { +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. 
+# +#----------------------------------------------------------------------- +# + { save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Check arguments. +# +#----------------------------------------------------------------------- +# + if [ "$#" -ne 3 ]; then + + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: + + ${func_name} file_full_path param value + +where the arguments are defined as follows: + + file_full_path: + Full path to the file in which the specified parameter's value will be + set. + + param: + Name of the parameter whose value will be set. + + value: + Value to set the parameter to. +" + + fi +# +#----------------------------------------------------------------------- +# +# Set local variables to appropriate input arguments. +# +#----------------------------------------------------------------------- +# + local file_full_path="$1" + local param="$2" + local value="$3" +# +#----------------------------------------------------------------------- +# +# Extract just the file name from the full path. 
+# +#----------------------------------------------------------------------- +# + local file="${file_full_path##*/}" +# +#----------------------------------------------------------------------- +# +# Print out an informational message. +# +#----------------------------------------------------------------------- +# + print_info_msg "\ +Setting parameter \"$param\" in file \"$file\" to \"$value\" ..." +# +#----------------------------------------------------------------------- +# +# The procedure we use to set the value of the specified parameter de- +# pends on the file the parameter is in. Compare the file name to sev- +# eral known file names and set the regular expression to search for +# (regex_search) and the one to replace with (regex_replace) according- +# ly. See the default configuration file (config_defaults.sh) for defi- +# nitions of the known file names. +# +#----------------------------------------------------------------------- +# + local regex_search="(^\s*$param=)(\".*\")?([^ \"]*)?(\(.*\))?(\s*[#].*)?" + local regex_replace="\1\"$value\"\5" +# +#----------------------------------------------------------------------- +# +# Use grep to determine whether regex_search exists in the specified +# file. If so, perform the regex replacement using sed. If not, print +# out an error message and exit. +# +#----------------------------------------------------------------------- +# + grep -q -E "${regex_search}" "${file_full_path}" || { \ + print_err_msg_exit "\ +Specified file (file_full_path) does not contain the searched-for regu- +lar expression (regex_search): + file_full_path = \"${file_full_path}\" + param = \"$param\" + value = \"$value\" + regex_search = ${regex_search}" + }; + + sed -i -r -e "s%${regex_search}%${regex_replace}%" "${file_full_path}" +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. 
+# +#----------------------------------------------------------------------- +# + { restore_shell_opts; } > /dev/null 2>&1 + +} + diff --git a/ush/set_file_param.sh b/ush/bash_utils/set_file_param.sh similarity index 74% rename from ush/set_file_param.sh rename to ush/bash_utils/set_file_param.sh index 2ad365b329..72a9ab041d 100644 --- a/ush/set_file_param.sh +++ b/ush/bash_utils/set_file_param.sh @@ -19,27 +19,54 @@ function set_file_param() { # #----------------------------------------------------------------------- # +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# # Check arguments. # #----------------------------------------------------------------------- # if [ "$#" -ne 3 ]; then - print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. + + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + Usage: - ${FUNCNAME[0]} file_full_path param value + ${func_name} file_full_path param value where the arguments are defined as follows: file_full_path: - Full path to the file in which the specified parameter's value will be set. + Full path to the file in which the specified parameter's value will be + set. param: Name of the parameter whose value will be set. 
value: - Value to set the parameter to." + Value to set the parameter to. +" fi # @@ -67,7 +94,7 @@ where the arguments are defined as follows: # #----------------------------------------------------------------------- # - print_info_msg_verbose "\ + print_info_msg "$VERBOSE" "\ Setting parameter \"$param\" in file \"$file\" to \"$value\" ..." # #----------------------------------------------------------------------- @@ -86,40 +113,35 @@ Setting parameter \"$param\" in file \"$file\" to \"$value\" ..." case $file in # - "$WFLOW_XML_FN") + "${WFLOW_XML_FN}") regex_search="(^\s*.*)" regex_replace="\1$value\3" ;; # - "$RGNL_GRID_NML_FN") + "${RGNL_GRID_NML_FN}") regex_search="^(\s*$param\s*=)(.*)" regex_replace="\1 $value" ;; # - "$FV3_NML_FN" | "$FV3_NML_CCPP_GFSEXTERN_GFSPHYS_FN" | "$FV3_NML_CCPP_GFSEXTERN_GSDPHYS_FN" | "$FV3_NML_CCPP_RAPHRRREXTERN_GSDPHYS_FN") + "${FV3_NML_FN}") regex_search="^(\s*$param\s*=)(.*)" regex_replace="\1 $value" ;; # - "$DIAG_TABLE_FN" | "$DIAG_TABLE_CCPP_GSD_FN" | "$DIAG_TABLE_CCPP_GSD_FN") + "${DIAG_TABLE_FN}") regex_search="(.*)(<$param>)(.*)" regex_replace="\1$value\3" ;; # - "$MODEL_CONFIG_FN") + "${MODEL_CONFIG_FN}") regex_search="^(\s*$param:\s*)(.*)" regex_replace="\1$value" ;; # - "$SCRIPT_VAR_DEFNS_FN") - regex_search="(^\s*$param=)(\".*\"|[^ \"]*)(\s*[#].*)?$" # Whole line with regex_replace=\1. -# regex_search="(^\s*$param=)(\".*\"|[^ \"]*)(\s*[#].*)?" + "${GLOBAL_VAR_DEFNS_FN}") regex_search="(^\s*$param=)(\".*\")?([^ \"]*)?(\(.*\))?(\s*[#].*)?" -# regex_replace="\1\"$value\"\3" -# regex_replace="\1$value\3" -# regex_replace="\1\3" -# regex_replace="\1AAAA\2BBBB\3CCCC\4DDDD\5" regex_replace="\1$value\5" +# set_bash_param "${file_full_path}" "$param" "$value" ;; # #----------------------------------------------------------------------- @@ -131,7 +153,6 @@ Setting parameter \"$param\" in file \"$file\" to \"$value\" ..." 
# *) print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": The regular expressions for performing search and replace have not been specified for this file: file = \"$file\"" @@ -147,18 +168,18 @@ specified for this file: # #----------------------------------------------------------------------- # - grep -q -E "$regex_search" $file_full_path + grep -q -E "${regex_search}" "${file_full_path}" if [ $? -eq 0 ]; then - sed -i -r -e "s%$regex_search%$regex_replace%" $file_full_path + sed -i -r -e "s%${regex_search}%${regex_replace}%" "${file_full_path}" else print_err_msg_exit "\ -Specified file (file_full_path) does not contain the searched-for regular -expression (regex_search): - file_full_path = \"$file_full_path\" +Specified file (file_full_path) does not contain the searched-for regu- +lar expression (regex_search): + file_full_path = \"${file_full_path}\" param = \"$param\" value = \"$value\" - regex_search = $regex_search" + regex_search = ${regex_search}" fi # #----------------------------------------------------------------------- @@ -169,6 +190,6 @@ expression (regex_search): #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 -} +} diff --git a/ush/cmp_expt_to_baseline.sh b/ush/cmp_expt_to_baseline.sh index 4961a817fc..4cce2481ec 100755 --- a/ush/cmp_expt_to_baseline.sh +++ b/ush/cmp_expt_to_baseline.sh @@ -16,23 +16,52 @@ # Script has only been tested on theia #----------------------------------------------------------------------- +# Do these need to be machine specific, e.g. by using modulefiles? module load intel module load nccmp # #----------------------------------------------------------------------- # +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). 
+# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Source bash utility functions. +# +#----------------------------------------------------------------------- +# +. ${scrfunc_dir}/source_util_funcs.sh +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# # Process arguments. # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -if [ $# -ne 1 ] && [ $# -ne 2 ]; then +if [ $# -eq 0 ] || [ $# -gt 2 ]; then printf " -ERROR from script ${script_name}: +ERROR from script ${scrfunc_fn}: Only 1 or 2 arguments may be specified. Usage: - > ${script_name} expt_dir [baseline_dir] + > ${scrfunc_fn} expt_dir [baseline_dir] where expt_dir is the experiment directory and baseline_dir is an op- tional baseline directory. @@ -50,12 +79,10 @@ fi # expt_dir="$1" if [ ! -d "${expt_dir}" ]; then - printf "\n + print_err_msg_exit "\ The specified experiment directory (expt_dir) does not exist: expt_dir = \"$expt_dir\" -Exiting script with nonzero return code. -" - exit 1 +Exiting script with nonzero return code." 
fi # #----------------------------------------------------------------------- @@ -119,11 +146,10 @@ fi # #----------------------------------------------------------------------- # -printf " +print_info_msg " The experiment and baseline directories are: expt_dir = \"$expt_dir\" - baseline_dir = \"$baseline_dir\" -" + baseline_dir = \"$baseline_dir\"" # #----------------------------------------------------------------------- # @@ -237,7 +263,8 @@ Exiting script with nonzero exit code. #----------------------------------------------------------------------- # cd ${expt_dir}/$subdir - num_files=$( ls -1 *.${file_ext} 2>/dev/null | wc -l ) +# num_files=$( ls -1 *.${file_ext} 2>/dev/null | wc -l ) + num_files=$( count_files *.${file_ext} 2>/dev/null | wc -l ) printf " Number of files with extension \"${file_ext}\" in subdirectory \"$subdir\" of the experiment directory is: ${num_files} @@ -370,5 +397,15 @@ else fi printf "Final result of regression test: ${result_str}\n" +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# +{ restore_shell_opts; } > /dev/null 2>&1 + exit ${exit_code} diff --git a/ush/cmp_rundirs_ncfiles.sh b/ush/cmp_rundirs_ncfiles.sh index bbe5ecd578..c650454877 100755 --- a/ush/cmp_rundirs_ncfiles.sh +++ b/ush/cmp_rundirs_ncfiles.sh @@ -23,12 +23,12 @@ function cmp_ncfiles_one_dir() { for fn in *.$fileext; do fn1="$fn" - if [ -f "$fn1" ] && [ ! -h "$fn1" ]; then # Check if regular file and not a symlink. + if [ -f "$fn1" ] && [ ! -L "$fn1" ]; then # Check if regular file and not a symlink. fn2="$dir2/$subdir/$fn" if [ -e "$fn2" ]; then # Check if file exists. - if [ -f "$fn2" ] && [ ! -h "$fn2" ]; then # Check if regular file and not a symlink. + if [ -f "$fn2" ] && [ ! -L "$fn2" ]; then # Check if regular file and not a symlink. 
printf "\nComparing file \"$fn\" in subdirectory \"$subdir\" ...\n" nccmp -d $fn1 $fn2 diff --git a/ush/compare_config_scripts.sh b/ush/compare_config_scripts.sh index a24ed139ae..0fa8697f38 100644 --- a/ush/compare_config_scripts.sh +++ b/ush/compare_config_scripts.sh @@ -1,26 +1,43 @@ -# This file is always sourced by another script (i.e. it's never run in -# its own shell), so there's no need to put the #!/bin/some_shell on the -# first line. - # #----------------------------------------------------------------------- # -# This script checks that all variables defined in the local configura- -# tion script (whose file name is stored in the variable LOCAL_CONFIG_- -# FN) are also assigned a default value in the default configuration -# script (whose file name is stored in the variable DEFAULT_CONFIG_FN). +# This file defines and then calls a function that checks that all vari- +# ables defined in the user-specified experiment/workflow configuration +# file (whose file name is stored in the variable EXPT_CONFIG_FN) are +# also assigned default values in the default configuration file (whose +# file name is stored in the variable DEFAULT_EXPT_CONFIG_FN). # #----------------------------------------------------------------------- # - +function compare_config_scripts() { +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +local scrfunc_fn=$( basename "${scrfunc_fp}" ) +local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. 
# #----------------------------------------------------------------------- # -# Source function definition files. +local func_name="${FUNCNAME[0]}" # #----------------------------------------------------------------------- # -. ./source_funcs.sh +# Source bash utility functions. +# +#----------------------------------------------------------------------- +# +. ${scrfunc_dir}/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -34,16 +51,16 @@ #----------------------------------------------------------------------- # # Create a list of variable settings in the default workflow/experiment -# default script by stripping out comments, blank lines, extraneous -# leading whitespace, etc from that script and saving the result in the -# variable var_list_default. Each line of var_list_default will have -# the form +# file (script) by stripping out comments, blank lines, extraneous lead- +# ing whitespace, etc from that file and saving the result in the varia- +# ble var_list_default. Each line of var_list_default will have the +# form # # VAR=... # # where the VAR is a variable name and ... is the value (including any # trailing comments). Then create an equivalent list for the local con- -# figuration script and save the result in var_list_local. +# figuration file and save the result in var_list_local. # #----------------------------------------------------------------------- # @@ -52,7 +69,7 @@ sed -r \ -e "s/^([ ]*)([^ ]+.*)/\2/g" \ -e "/^#.*/d" \ -e "/^$/d" \ - ${DEFAULT_CONFIG_FN} \ + ${DEFAULT_EXPT_CONFIG_FN} \ ) var_list_local=$( \ @@ -60,14 +77,14 @@ sed -r \ -e "s/^([ ]*)([^ ]+.*)/\2/g" \ -e "/^#.*/d" \ -e "/^$/d" \ - ${LOCAL_CONFIG_FN} \ + ${EXPT_CONFIG_FN} \ ) # #----------------------------------------------------------------------- # # Loop through each line of var_list_local. 
For each line, extract the # the name of the variable that is being set (say VAR) and check that -# this variable is set somewhere in the default configuration script by +# this variable is set somewhere in the default configuration file by # verifying that a line that starts with "VAR=" exists in var_list_de- # fault. # @@ -78,13 +95,15 @@ while read crnt_line; do # Note that a variable name will be found only if the equal sign immed- # iately follows the variable name. # - var_name=$( printf "%s" "${crnt_line}" | sed -n -r -e "s/^([^ ]*)=.*/\1/p") + var_name=$( printf "%s" "${crnt_line}" | sed -n -r -e "s/^([^ =\"]*)=.*/\1/p") if [ -z "${var_name}" ]; then - print_info_msg "\ -Current line of configuration script \"${LOCAL_CONFIG_FN}\" does not contain -a variable name: + print_info_msg " +Current line (crnt_line) of user-specified experiment/workflow configu- +ration file (EXPT_CONFIG_FN) does not contain a variable name (i.e. +var_name is empty): + EXPT_CONFIG_FN = \"${EXPT_CONFIG_FN}\" crnt_line = \"${crnt_line}\" var_name = \"${var_name}\" Skipping to next line." @@ -98,11 +117,14 @@ Skipping to next line." # grep "^${var_name}=" <<< "${var_list_default}" > /dev/null 2>&1 || \ print_err_msg_exit "\ -Variable in local configuration script \"${LOCAL_CONFIG_FN}\" not set in default -configuration script \"${DEFAULT_CONFIG_FN}\": +The variable specified by var_name in the user-specified experiment/ +workflow configuration file (EXPT_CONFIG_FN) does not appear in the de- +fault experiment/workflow configuration file (DEFAULT_EXPT_CONFIG_FN): + EXPT_CONFIG_FN = \"${EXPT_CONFIG_FN}\" + DEFAULT_EXPT_CONFIG_FN = \"${DEFAULT_EXPT_CONFIG_FN}\" var_name = \"${var_name}\" -Please assign a default value to this variable in \"${DEFAULT_CONFIG_FN}\" -and rerun." +Please assign a default value to this variable in the default configura- +tion file and rerun." 
fi @@ -117,4 +139,13 @@ done <<< "${var_list_local}" # { restore_shell_opts; } > /dev/null 2>&1 +} +# +#----------------------------------------------------------------------- +# +# Call the function defined above. +# +#----------------------------------------------------------------------- +# +compare_config_scripts diff --git a/ush/config.community.sh b/ush/config.community.sh new file mode 100644 index 0000000000..bf75da0771 --- /dev/null +++ b/ush/config.community.sh @@ -0,0 +1,32 @@ +MACHINE="hera" +ACCOUNT="an_account" +EXPT_SUBDIR="test_community" + +QUEUE_DEFAULT="batch" +QUEUE_HPSS="service" +QUEUE_FCST="batch" + +VERBOSE="TRUE" + +RUN_ENVIR="community" +PREEXISTING_DIR_METHOD="rename" + +PREDEF_GRID_NAME="GSD_HRRR25km" +GRID_GEN_METHOD="JPgrid" +QUILTING="TRUE" +USE_CCPP="TRUE" +CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" +FCST_LEN_HRS="06" +LBC_UPDATE_INTVL_HRS="6" + +DATE_FIRST_CYCL="20190701" +DATE_LAST_CYCL="20190701" +CYCL_HRS=( "00" ) + +EXTRN_MDL_NAME_ICS="FV3GFS" +EXTRN_MDL_NAME_LBCS="FV3GFS" + +RUN_TASK_MAKE_GRID="TRUE" +RUN_TASK_MAKE_OROG="TRUE" +RUN_TASK_MAKE_SFC_CLIMO="TRUE" + diff --git a/ush/config.nco.sh b/ush/config.nco.sh new file mode 100644 index 0000000000..ef63ec9b81 --- /dev/null +++ b/ush/config.nco.sh @@ -0,0 +1,58 @@ +MACHINE="hera" +ACCOUNT="an_account" +EXPT_SUBDIR="test_nco" + +QUEUE_DEFAULT="batch" +QUEUE_HPSS="service" +QUEUE_FCST="batch" + +VERBOSE="TRUE" + +RUN_ENVIR="nco" +PREEXISTING_DIR_METHOD="rename" + +EMC_GRID_NAME="conus_c96" # For now, this maps to PREDEF_GRID_NAME="EMC_CONUS_coarse". 
+GRID_GEN_METHOD="GFDLgrid" + +QUILTING="TRUE" +USE_CCPP="TRUE" +CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" +FCST_LEN_HRS="06" +LBC_UPDATE_INTVL_HRS="6" + +DATE_FIRST_CYCL="20190901" +DATE_LAST_CYCL="20190901" +CYCL_HRS=( "18" ) + +EXTRN_MDL_NAME_ICS="FV3GFS" +EXTRN_MDL_NAME_LBCS="FV3GFS" + +# +# In NCO mode, the following don't need to be explicitly set to "FALSE" +# in this configuration file because the experiment generation script +# will do this (along with printing out an informational message). +# +#RUN_TASK_MAKE_GRID="FALSE" +#RUN_TASK_MAKE_OROG="FALSE" +#RUN_TASK_MAKE_SFC_CLIMO="FALSE" + +RUN="an_experiment" +COMINgfs="/scratch1/NCEPDEV/hwrf/noscrub/hafs-input/COMGFS" # Path to directory containing files from the external model (FV3GFS). +STMP="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/NCO_dirs/stmp" # Path to directory STMP that mostly contains input files. +PTMP="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/NCO_dirs/ptmp" # Path to directory PTMP in which the experiment's output files will be placed. + +# +# In NCO mode, the user must manually (e.g. after doing the build step) +# create the symlink "${FIXrrfs}/fix_sar" that points to EMC's FIXsar +# directory on the machine. For example, on hera, the symlink's target +# needs to be +# +# /scratch2/NCEPDEV/fv3-cam/emc.campara/fix_fv3cam/fix_sar +# +# The experiment generation script will then set FIXsar to +# +# FIXsar="${FIXrrfs}/fix_sar/${EMC_GRID_NAME}" +# +# where EMC_GRID_NAME has the value set above. +# + diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 282018220f..c5d38bfe62 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -1,11 +1,35 @@ -# This file is always sourced by another script (i.e. it's never run in -# its own shell), so there's no need to put the #!/bin/some_shell on the -# first line. 
+# +#----------------------------------------------------------------------- +# +# This file sets the experiment's configuration variables (which are +# global shell variables) to their default values. For many of these +# variables, the valid values that they may take on are defined in the +# file $USHDIR/valid_param_vals.sh. +# +#----------------------------------------------------------------------- +# # #----------------------------------------------------------------------- # +# Set the RUN_ENVIR variable that is listed and described in the WCOSS +# Implementation Standards document: +# +# NCEP Central Operations +# WCOSS Implementation Standards +# April 17, 2019 +# Version 10.2.0 +# +# RUN_ENVIR is described in this document as follows: # +# Set to "nco" if running in NCO's production environment. Used to +# distinguish between organizations. +# +# Valid values are "nco" and "community". Here, we use it to generate +# and run the experiment either in NCO mode (if RUN_ENVIR is set to "nco") +# or in community mode (if RUN_ENVIR is set to "community"). This has +# implications on the experiment variables that need to be set and the +# directory structure used. # #----------------------------------------------------------------------- # @@ -17,33 +41,29 @@ RUN_ENVIR="nco" # Set machine and queue parameters. Definitions: # # MACHINE: -# Machine on which the workflow will run. Valid values are "WCOSS_C", -# "WCOSS", "DELL", "THEIA","HERA","JET", "ODIN", and "CHEYENNE". New values -# may be added as the workflow is ported to additional machines. +# Machine on which the workflow will run. # # ACCOUNT: # The account under which to submit jobs to the queue. # # QUEUE_DEFAULT: # The default queue to which workflow tasks are submitted. If a task -# does not have a specific variable in which its queue is defined (e.g. -# QUEUE_HPSS, QUEUE_FCST; see below), it is submitted to this -# queue. 
If this is not set or set to an empty string, it will be reset -# to a machine-dependent value in the setup script (setup.sh). +# does not have a specific variable that specifies the queue to which it +# will be submitted (e.g. QUEUE_HPSS, QUEUE_FCST; see below), it will be +# submitted to the queue specified by this variable. If this is not set +# or is set to an empty string, it will be (re)set to a machine-dependent +# value. # # QUEUE_HPSS: -# The queue to which the tasks that get or link to external model files -# (needed to generate ICs and LBCs) are submitted. This task either co- -# pies the GFS analysis and forecast files from a system direc- -# tory or fetches them from HPSS. In either case, it places the files -# in a temporary directory. If this is not set or set to an empty -# string, it will be reset to a machine-dependent value in the setup -# script (setup.sh). +# The queue to which the tasks that get or create links to external model +# files [which are needed to generate initial conditions (ICs) and lateral +# boundary conditions (LBCs)] are submitted. If this is not set or is +# set to an empty string, it will be (re)set to a machine-dependent value. # # QUEUE_FCST: -# The queue to which the run_FV3 task is submitted. This task runs -# the forecast. If this is not set or set to an empty string, it will -# be reset to a machine-dependent value in the setup script (setup.sh). +# The queue to which the task that runs a forecast is submitted. If this +# is not set or set to an empty string, it will be (re)set to a machine- +# dependent value. # # mach_doc_end # @@ -57,27 +77,53 @@ QUEUE_FCST="production_queue" # #----------------------------------------------------------------------- # +# Set cron-related parameters. Definitions: +# +# USE_CRON_TO_RELAUNCH: +# Flag that determines whether or not to add a line to the user's cron +# table to call the experiment launch script every CRON_RELAUNCH_INTVL_MNTS +# minutes. 
+# +# CRON_RELAUNCH_INTVL_MNTS: +# The interval (in minutes) between successive calls of the experiment +# launch script by a cron job to (re)launch the experiment (so that the +# workflow for the experiment kicks off where it left off). +# +#----------------------------------------------------------------------- +# +USE_CRON_TO_RELAUNCH="FALSE" +CRON_RELAUNCH_INTVL_MNTS="03" +# +#----------------------------------------------------------------------- +# # dir_doc_start # Set directories. Definitions: # # EXPT_BASEDIR: # The base directory in which the experiment directory will be created. # If this is not specified or if it is set to an empty string, it will -# default to $BASEDIR/expt_dirs. The full path to the experiment di- -# rectory, which we will denote by EXPTDIR, will be set to $EXPT_BASEDIR -# /$EXPT_SUBDIR (also see definition of EXPT_SUBDIR). +# default to ${HOMErrfs}/../expt_dirs. # # EXPT_SUBDIR: -# The name that the experiment directory (without the full path) will -# have. The full path to the experiment directory, which we will denote -# by EXPTDIR, will be set to ${EXPT_BASEDIR}/${EXPT_SUBDIR} (also see -# definition of EXPT_BASEDIR). +# The name that the experiment directory (without the full path) will +# have. The full path to the experiment directory, which will be contained +# in the variable EXPTDIR, will be: +# +# EXPTDIR="${EXPT_BASEDIR}/${EXPT_SUBDIR}" +# +# This cannot be empty. If set to a null string here, it must be set to +# a (non-empty) value in the user-defined experiment configuration file. +# +# NET, envir, RUN, COMINgfs, STMP, PTMP: +# Directories or variables used to create directory names that are needed +# when generating and running an experiment in NCO mode (see the description +# of the RUN_ENVIR variable above). These are defined in the WCOSS +# Implementation Standards document and thus will not be described here. 
+# # dir_doc_end # #----------------------------------------------------------------------- # -#EXPT_BASEDIR="/path/to/directory/in/which/experiment/subdirs/will/exist" -#EXPT_SUBDIR="my_test" EXPT_BASEDIR="" EXPT_SUBDIR="" @@ -90,49 +136,22 @@ PTMP="/path/to/temporary/directory/ptmp" # #----------------------------------------------------------------------- # -# File names. Definitions: +# Set file names. Definitions: # # RGNL_GRID_NML_FN: -# Name of file containing the namelist settings for the utility that ge- -# nerates a "JPgrid" type of regional grid. +# Name of file containing the namelist settings for the code that generates +# a "JPgrid" type of regional grid. # # FV3_NML_FN: -# Name of file containing the FV3SAR namelist settings. -# -# FV3_NML_CCPP_GFSEXTERN_GFSPHYS_FN: -# Name of file containing the FV3SAR namelist settings for a CCPP- -# enabled forecast that uses GFS external model data and GFS physics. -# -# FV3_NML_CCPP_GFSEXTERN_GSDPHYS_FN: -# Name of file containing the FV3SAR namelist settings for a CCPP- -# enabled forecast that uses GFS external model data and GSD physics. -# -# FV3_NML_CCPP_RAPHRRREXTERN_GSDPHYS_FN: -# Name of file containing the FV3SAR namelist settings for a CCPP- -# enabled forecast that uses RAP or HRRR external model data and GSD physics. +# Name of file containing the forecast model's namelist settings. # # DIAG_TABLE_FN: -# Name of file that specifies the fields that the FV3SAR will output. -# -# DIAG_TABLE_CCPP_GFS_FN: -# Name of file that specifies the fields that the FV3SAR will output for -# a CCPP-enabled forecast that uses GFS physics. This is needed because -# the current version of the CCPP-enabled FV3SAR executable using GFS -# physics cannot handle refl_10cm variable in diag_table. -# -# DIAG_TABLE_CCPP_GSD_FN: -# Name of file that specifies the fields that the FV3SAR will output for -# a CCPP-enabled forecast that uses GSD physics. This includes varia- -# bles specific to Thompson microphysics. 
+# Name of file that specifies the fields that the forecast model will +# output. # # FIELD_TABLE_FN: -# Name of file that specifies the traces that the FV3SAR will read in -# from the IC/BC files. -# -# FIELD_TABLE_CCPP_GSD_FN: -# Name of file that specifies the traces that the FV3SAR will read in -# from the IC/BC files for a CCPP-enabled forecast that uses GSD phys- -# ics. +# Name of file that specifies the tracers that the forecast model will +# read in from the IC/LBC files. # # DATA_TABLE_FN: # Name of file that specifies ??? @@ -144,45 +163,41 @@ PTMP="/path/to/temporary/directory/ptmp" # Name of file that specifies ??? # # WFLOW_XML_FN: -# Name of the workflow XML file to be passed to rocoto. -# -# SCRIPT_VAR_DEFNS_FN: -# Name of file that is sourced by the worflow scripts to set variable -# values. -# -# WRTCMP_PARAMS_TEMPLATE_FN: -# Name of the template file that needs to be appended to the model con- -# figuration file (MODEL_CONFIG_FN) if the write component (QUILTING) is -# going to be used to write output files. This file contains defini- -# tions (either in terms of actual values or placeholders) of the para- -# meters that the write component needs. If the write component is go- -# ing to be used, this file is first appended to MODEL_CONFIG_FN, and -# any placeholder values in the variable definitions in the new MODEL_- -# CONFIG_FN file are subsequently replaced by actual values. If a pre- -# defined domain is being used (see PREDEF_GRID_NAME below), WRTCMP_PA- -# RAMS_TEMPLATE_FN may be set to an empty string. In this case, it will -# be reset to the name of the existing template file for that predefined -# domain. It is assumed that the file specified by WRTCMP_PARAMS_TEMP- -# LATE_FN is located in the templates directory TEMPLATE_DIR, which is -# in turn defined in the setup script. +# Name of the rocoto workflow XML file that the experiment generation +# script creates and that defines the workflow for the experiment. 
+# +# GLOBAL_VAR_DEFNS_FN: +# Name of file containing the definitions of the primary experiment variables +# (parameters) defined in this default configuration script and in the +# user-specified configuration as well as secondary experiment variables +# generated by the experiment generation script. This file is sourced +# by many scripts (e.g. the J-job scripts corresponding to each workflow +# task) in order to make all the experiment variables available in those +# scripts. +# +# WFLOW_LAUNCH_SCRIPT_FN: +# Name of the script that can be used to (re)launch the experiment's rocoto +# workflow. +# +# WFLOW_LAUNCH_LOG_FN: +# Name of the log file that contains the output from successive calls to +# the workflow launch script (WFLOW_LAUNCH_SCRIPT_FN). # #----------------------------------------------------------------------- # RGNL_GRID_NML_FN="regional_grid.nml" -FV3_NML_FN="input.nml" -FV3_NML_CCPP_GFSEXTERN_GFSPHYS_FN="input_ccpp_gfsextern_gfsphys.nml" -FV3_NML_CCPP_GFSEXTERN_GSDPHYS_FN="input_ccpp_gfsextern_gsdphys.nml" -FV3_NML_CCPP_RAPHRRREXTERN_GSDPHYS_FN="input_ccpp_raphrrrextern_gsdphys.nml" + +DATA_TABLE_FN="data_table" DIAG_TABLE_FN="diag_table" -DIAG_TABLE_CCPP_GSD_FN="diag_table_ccpp_gsd" FIELD_TABLE_FN="field_table" -FIELD_TABLE_CCPP_GSD_FN="field_table_ccpp_gsd" -DATA_TABLE_FN="data_table" +FV3_NML_FN="input.nml" MODEL_CONFIG_FN="model_configure" NEMS_CONFIG_FN="nems.configure" + WFLOW_XML_FN="FV3SAR_wflow.xml" -SCRIPT_VAR_DEFNS_FN="var_defns.sh" -WRTCMP_PARAMS_TEMPLATE_FN="" +GLOBAL_VAR_DEFNS_FN="var_defns.sh" +WFLOW_LAUNCH_SCRIPT_FN="launch_FV3SAR_wflow.sh" +WFLOW_LAUNCH_LOG_FN="log.launch_FV3SAR_wflow" # #----------------------------------------------------------------------- # @@ -190,13 +205,11 @@ WRTCMP_PARAMS_TEMPLATE_FN="" # # DATE_FIRST_CYCL: # Starting date of the first forecast in the set of forecasts to run. -# Format is "YYYYMMDD". Note that this does not include the hour-of- -# day. +# Format is "YYYYMMDD". 
Note that this does not include the hour-of-day. # # DATE_LAST_CYCL: # Starting date of the last forecast in the set of forecasts to run. -# Format is "YYYYMMDD". Note that this does not include the hour-of- -# day. +# Format is "YYYYMMDD". Note that this does not include the hour-of-day. # # CYCL_HRS: # An array containing the hours of the day at which to launch forecasts. @@ -217,114 +230,97 @@ FCST_LEN_HRS="24" # #----------------------------------------------------------------------- # -# Set initial and lateral boundary condition generation parameters. De- -# finitions: +# Set initial and lateral boundary condition generation parameters. +# Definitions: # -# EXTRN_MDL_NAME_ICS +# EXTRN_MDL_NAME_ICS: #`The name of the external model that will provide fields from which -# initial condition (IC) and surface files will be generated for input -# into the FV3SAR. +# initial condition (including and surface) files will be generated for +# input into the forecast model. # -# EXTRN_MDL_NAME_LBCS +# EXTRN_MDL_NAME_LBCS: #`The name of the external model that will provide fields from which -# lateral boundary condition (LBC) files will be generated for input in- -# to the FV3SAR. +# lateral boundary condition (LBC) files will be generated for input into +# the forecast model. # # LBC_UPDATE_INTVL_HRS: -# The frequency (in integer hours) with which lateral boundary data will -# be provided to the FV3SAR model. We will refer to this as the bound- -# ary update interval. If the boundary data is obtained from GFS fore- -# cast files in nemsio format stored in HPSS (mass store), then LBC_UP- -# DATE_INTVL_HRS must be greater than or equal to 6 because these fore- -# cast files are available only every 6 hours. -# -# EXTRN_MDL_INFO_FN: -# Name of sourceable file (not including the full path) defining the va- -# riables specified in EXTRN_MDL_INFO_VAR_NAMES (see below). 
-# -# EXTRN_MDL_INFO_VAR_NAMES: -# Names to use for the following parameters (for a given cycle of the -# FV3SAR): -# * The date and hour-of-day (in YYYYMMDDHH format) of the start time of -# the external model. -# * Array containing the forecast hours (relative to the -# * Array containing the names of the external model output files. -# * The system directory in which the external model output files may be -# found (if the cycle start time is not too old). -# * The format of the archive file (e.g. "tar", "zip", etc) on HPSS that -# may contain the external model output files. Note that this archive -# file will exist only if the cycle start time is old enough. -# * The name of the archive file on HPSS that may contain the external -# model output files. -# * The full path to the archive file on HPSS that may contain the ex- -# ternal model output files. -# * The directory "within" the archive file in which the external model -# output files are stored. +# The interval (in integer hours) with which LBC files will be generated. +# We will refer to this as the boundary update interval. Note that the +# model specified in EXTRN_MDL_NAME_LBCS must have data available at a +# frequency greater than or equal to that implied by LBC_UPDATE_INTVL_HRS. +# For example, if LBC_UPDATE_INTVL_HRS is set to 6, then the model must +# have data availble at least every 6 hours. It is up to the user to +# ensure that this is the case. +# +# FV3GFS_FILE_FMT_ICS: +# If using the FV3GFS model as the source of the ICs (i.e. if EXTRN_MDL_NAME_ICS +# is set to "FV3GFS"), this variable specifies the format of the model +# files to use when generating the ICs. +# +# FV3GFS_FILE_FMT_LBCS: +# If using the FV3GFS model as the source of the LBCs (i.e. if +# EXTRN_MDL_NAME_LBCS is set to "FV3GFS"), this variable specifies the +# format of the model files to use when generating the LBCs. 
# #----------------------------------------------------------------------- # EXTRN_MDL_NAME_ICS="FV3GFS" EXTRN_MDL_NAME_LBCS="FV3GFS" -FV3GFS_DATA_TYPE="nemsio" LBC_UPDATE_INTVL_HRS="6" +FV3GFS_FILE_FMT_ICS="nemsio" +FV3GFS_FILE_FMT_LBCS="nemsio" # #----------------------------------------------------------------------- # -# Flag controlling whether or not a CCPP-enabled version of the FV3SAR -# will be run. This must be set to "TRUE" or "FALSE". Setting this -# flag to "TRUE" will cause the workflow to stage the appropriate CCPP- -# enabled versions of the FV3SAR executable and various input files -# (e.g. the FV3SAR namelist file, the diagnostics table file, the field -# table file, etc) that have settings that correspond to EMC's CCPP-ena- -# bled FV3SAR regression test. It will also cause additional files -# (i.e. in addition to the ones for the non-CCPP enabled version of the -# FV3SAR) to be staged in the experiment directory (e.g. module setup -# scripts, module load files). -# -#----------------------------------------------------------------------- -# -USE_CCPP="FALSE" -# -#----------------------------------------------------------------------- -# -# If CCPP has been set to "TRUE", the CCPP_PHYS_SUITE variable defines -# the physics suite that will run using CCPP. This affects the FV3SAR -# namelist file, the diagnostics table file, the field table file, and -# the XML physics suite definition file that are staged in the experi- -# ment directory and/or the run directories under it. As of 4/4/2019, -# valid values for this parameter are: +# Set CCPP related parameters. Definitions: # -# "GFS" - to run with the GFS physics suite -# "GSD" - to run with the GSD physics suite +# USE_CCPP: +# Flag controlling whether or not a CCPP-enabled version of the forecast +# model will be run. Note that the user is responsible for ensuring that +# a CCPP-enabled forecast model executable is built and placed at the +# correct location (that is part of the build process). 
# -# Note that with CCPP set to "FALSE", the only physics suite that can be -# run is the GFS. +# CCPP_PHYS_SUITE: +# If USE_CCPP has been set to "TRUE", this variable defines the physics +# suite that will run using CCPP. The choice of physics suite determines +# the forecast model's namelist file, the diagnostics table file, the +# field table file, and the XML physics suite definition file that are +# staged in the experiment directory or the cycle directories under it. +# If USE_CCPP is set to "FALSE", the only physics suite that can be run +# is the GFS. # -# IMPORTANT NOTE: -# It is up to the user to ensure that the CCPP FV3 executable is com- -# piled with either the dynamic build or the static build with the cor- -# rect physics package. If using a static build, the run will fail if -# there is a mismatch between the physics package specified in this con- -# figuration file and the physics package used for the static build. +# Note that it is up to the user to ensure that the CCPP-enabled forecast +# model executable is built with either the dynamic build (which can +# handle any CCPP physics package but is slower to run) or the static +# build with the correct physics package. If using a static build, the +# forecast will fail if the physics package specified in the experiment's +# variable defintions file (GLOBAL_VAR_DEFNS_FN) is not the same as the +# one that was used for the static build. # #----------------------------------------------------------------------- # -CCPP_PHYS_SUITE="GSD" -#CCPP_PHYS_SUITE="GFS" +USE_CCPP="FALSE" +CCPP_PHYS_SUITE="FV3_GSD_v0" # #----------------------------------------------------------------------- # -# Set GRID_GEN_METHOD. This variable specifies the method to use to ge- -# nerate a regional grid in the horizontal. The values that grid_gen_- -# method can take on are: +# Set GRID_GEN_METHOD. 
This variable specifies the method to use to +# generate a regional grid in the horizontal, or, if using pregenerated +# grid files instead of running the grid generation task, the grid generation +# method that was used to generate those files. The values that +# GRID_GEN_METHOD can take on are: # # * "GFDLgrid": -# This will generate a regional grid by first generating a parent glo- -# bal cubed-sphere grid using GFDL's grid generator. +# This setting will generate a regional grid by first generating a +# "parent" global cubed-sphere grid and then taking a portion of tile +# 6 of that global grid -- referred to in the grid generation scripts +# as "tile 7" even though it doesn't correspond to a complete tile -- +# and using it as the regional grid. Note that the forecast is run on +# only on the regional grid (i.e. tile 7, not tiles 1 through 6). # # * "JPgrid": -# This will generate a regional grid using the map projection deve- -# loped by Jim Purser of EMC. +# This will generate a regional grid using the map projection developed +# by Jim Purser of EMC. # #----------------------------------------------------------------------- # @@ -332,304 +328,292 @@ GRID_GEN_METHOD="JPgrid" # #----------------------------------------------------------------------- # -# Set parameters specific to the method for generating a regional grid -# WITH a global parent (i.e. for GRID_GEN_METHOD set to "GFDLgrid"). -# Note that for this method: +# Set parameters specific to the "GFDLgrid" method of generating a regional +# grid (i.e. for GRID_GEN_METHOD set to "GFDLgrid"). The following +# parameters will be used only if GRID_GEN_METHOD is set to "GFDLgrid". +# In this grid generation method: # -# * The regional grid is defined with respect to a global cubed-sphere -# grid. Thus, the parameters for a global cubed-sphere grid must be -# specified even though the model equations are not integrated on this -# global grid (they are integrated only on the regional grid). 
+# * The regional grid is defined with respect to a "parent" global cubed- +# sphere grid. Thus, all the parameters for a global cubed-sphere grid +# must be specified in order to define this parent global grid even +# though the model equations are not integrated on (they are integrated +# only on the regional grid). # -# * RES is the number of grid cells in either one of the two horizontal -# directions x and y on any one of the 6 tiles of the global cubed- -# sphere grid. RES must be one of "48", "96", "192", "384", "768", -# "1152", and "3072". The mapping from RES to nominal resolution -# (cell size) for a uniform global grid (i.e. Schmidt stretch factor -# stretch_fac set to 1) is as follows: +# * GFDLgrid_RES is the number of grid cells in either one of the two +# horizontal directions x and y on any one of the 6 tiles of the parent +# global cubed-sphere grid. The mapping from GFDLgrid_RES to a nominal +# resolution (grid cell size) for a uniform global grid (i.e. Schmidt +# stretch factor GFDLgrid_STRETCH_FAC set to 1) for several values of +# GFDLgrid_RES is as follows: # -# C192 --> 50km -# C384 --> 25km -# C768 --> 13km -# C1152 --> 8.5km -# C3072 --> 3.2km +# GFDLgrid_RES typical cell size +# ------------ ----------------- +# 192 50 km +# 384 25 km +# 768 13 km +# 1152 8.5 km +# 3072 3.2 km # -# Note that these are nominal resolutions. The actual cell size on +# Note that these are only typical cell sizes. The actual cell size on # the global grid tiles varies somewhat as we move across a tile. # # * Tile 6 has arbitrarily been chosen as the tile to use to orient the -# global grid on the sphere (Earth). This is done by specifying lon_- -# ctr_T6 and lat_ctr_T6, which are the longitude and latitude (in de- -# grees) of the center of tile 6. +# global parent grid on the sphere (Earth). This is done by specifying +# GFDLgrid_LON_T6_CTR and GFDLgrid_LAT_T6_CTR, which are the longitude +# and latitude (in degrees) of the center of tile 6. 
# -# * Setting the Schmidt stretching factor stretch_fac to a value greater -# than 1 shrinks tile 6, while setting it to a value less than 1 (but -# still greater than 0) expands tile 6. The remaining 5 tiles change +# * Setting the Schmidt stretching factor GFDLgrid_STRETCH_FAC to a value +# greater than 1 shrinks tile 6, while setting it to a value less than +# 1 (but still greater than 0) expands it. The remaining 5 tiles change # shape as necessary to maintain global coverage of the grid. # -# * The cell size on a given global tile depends on both RES and -# stretch_fac (since changing RES changes the number of cells in the -# tile, and changing stretch_fac modifies the shape and size of the -# tile). +# * The cell size on a given global tile depends on both GFDLgrid_RES and +# GFDLgrid_STRETCH_FAC (since changing GFDLgrid_RES changes the number +# of cells in the tile, and changing GFDLgrid_STRETCH_FAC modifies the +# shape and size of the tile). # # * The regional grid is embedded within tile 6 (i.e. it doesn't extend # beyond the boundary of tile 6). Its exact location within tile 6 is -# is determined by the starting and ending i and j indices -# -# istart_rgnl_T6 -# jstart_rgnl_T6 -# iend_rgnl_T6 -# jend_rgnl_T6 -# -# where i is the grid index in the x direction and j is the grid index -# in the y direction. -# -# * In the FV3SAR code, for convenience the regional grid is denoted as -# "tile 7" even though it doesn't map back to one of the 6 faces of -# the cube from which the global grid is generated (it maps back to -# only a subregion on face 6 since it is wholly confined within tile -# 6). Tile 6 is often referred to as the "parent" tile of the region- -# al grid. -# -# * refine_ratio is the refinement ratio of the regional grid (tile 7) -# with respect to the grid on its parent tile (tile 6), i.e. it is the -# number of grid cells along the boundary of the regional grid that -# abut one cell on tile 6. 
Thus, the cell size on the regional grid -# depends not only on RES and stretch_fac (because the cell size on -# tile 6 depends on these two parameters) but also on refine_ratio. -# Note that as on the tiles of the global grid, the cell size on the -# regional grid is not uniform but varies as we move across the grid. -# -# Definitions: -# -# RES: -# Number of points in each of the two horizontal directions (x and y) -# on each tile of the global grid. Must be "48", "96", "192", "384", -# "768", "1152", or "3072" -# -# lon_ctr_T6: +# is determined by specifying the starting and ending i and j indices +# of the regional grid on tile 6, where i is the grid index in the x +# direction and j is the grid index in the y direction. These indices +# are stored in the variables +# +# GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G +# GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G +# GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G +# GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G +# +# * In the forecast model code and in the experiment generation and workflow +# scripts, for convenience the regional grid is denoted as "tile 7" even +# though it doesn't map back to one of the 6 faces of the cube from +# which the parent global grid is generated (it maps back to only a +# subregion on face 6 since it is wholly confined within tile 6). Tile +# 6 may be referred to as the "parent" tile of the regional grid. +# +# * GFDLgrid_REFINE_RATIO is the refinement ratio of the regional grid +# (tile 7) with respect to the grid on its parent tile (tile 6), i.e. +# it is the number of grid cells along the boundary of the regional grid +# that abut one cell on tile 6. Thus, the cell size on the regional +# grid depends not only on GFDLgrid_RES and GFDLgrid_STRETCH_FAC (because +# the cell size on tile 6 depends on these two parameters) but also on +# GFDLgrid_REFINE_RATIO. Note that as on the tiles of the global grid, +# the cell size on the regional grid is not uniform but varies as we +# move across the grid. 
+# +# Definitions of parameters that need to be specified when GRID_GEN_METHOD +# is set to "GFDLgrid": +# +# GFDLgrid_LON_T6_CTR: # Longitude of the center of tile 6 (in degrees). # -# lat_ctr_T6: +# GFDLgrid_LAT_T6_CTR: # Latitude of the center of tile 6 (in degrees). # -# stretch_fac: -# Stretching factor used in the Schmidt transformation applied to the -# cubed sphere grid. -# -# istart_rgnl_T6: -# i-index on tile 6 at which the regional grid (tile 7) starts. -# -# iend_rgnl_T6: -# i-index on tile 6 at which the regional grid (tile 7) ends. -# -# jstart_rgnl_T6: -# j-index on tile 6 at which the regional grid (tile 7) starts. +# GFDLgrid_RES: +# Number of points in each of the two horizontal directions (x and y) on +# each tile of the parent global grid. Note that the name of this parameter +# is really a misnomer because although it has the stirng "RES" (for +# "resolution") in its name, it specifies number of grid cells, not grid +# size (in say meters or kilometers). However, we keep this name in order +# to remain consistent with the usage of the word "resolution" in the +# global forecast model and other auxiliary codes. # -# jend_rgnl_T6: -# j-index on tile 6 at which the regional grid (tile 7) ends. +# GFDLgrid_STRETCH_FAC: +# Stretching factor used in the Schmidt transformation applied to the +# parent cubed-sphere grid. # -# refine_ratio: +# GFDLgrid_REFINE_RATIO: # Cell refinement ratio for the regional grid, i.e. the number of cells # in either the x or y direction on the regional grid (tile 7) that abut # one cell on its parent tile (tile 6). # -#----------------------------------------------------------------------- +# GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G: +# i-index on tile 6 at which the regional grid (tile 7) starts. 
# -if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - - RES="384" - lon_ctr_T6=-97.5 - lat_ctr_T6=35.5 - stretch_fac=1.5 - istart_rgnl_T6=10 - iend_rgnl_T6=374 - jstart_rgnl_T6=10 - jend_rgnl_T6=374 - refine_ratio=3 +# GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G: +# i-index on tile 6 at which the regional grid (tile 7) ends. # -#----------------------------------------------------------------------- +# GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G: +# j-index on tile 6 at which the regional grid (tile 7) starts. # -# Set parameters specific to the method for generating a regional grid -# without a global parent (i.e. for GRID_GEN_METHOD set to "JPgrid"). -# These are: +# GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G: +# j-index on tile 6 at which the regional grid (tile 7) ends. # -# lon_rgnl_ctr: +# GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES: +# Flag that determines the file naming convention to use for grid, orography, +# and surface climatology files (or, if using pregenerated files, the +# naming convention that was used to name these files). These files +# usually start with the string "C${RES}_", where RES is an integer. +# In the global forecast model, RES is the number of points in each of +# the two horizontal directions (x and y) on each tile of the global grid +# (defined here as GFDLgrid_RES). If this flag is set to "TRUE", RES will +# be set to GFDLgrid_RES just as in the global forecast model. If it is +# set to "FALSE", we calculate (in the grid generation task) an "equivalent +# global uniform cubed-sphere resolution" -- call it RES_EQUIV -- and +# then set RES equal to it. RES_EQUIV is the number of grid points in +# each of the x and y directions on each tile that a global UNIFORM (i.e. +# stretch factor of 1) cubed-sphere grid would have to have in order to +# have the same average grid size as the regional grid. 
This is a more +# useful indicator of the grid size because it takes into account the +# effects of GFDLgrid_RES, GFDLgrid_STRETCH_FAC, and GFDLgrid_REFINE_RATIO +# in determining the regional grid's typical grid size, whereas simply +# setting RES to GFDLgrid_RES doesn't take into account the effects of +# GFDLgrid_STRETCH_FAC and GFDLgrid_REFINE_RATIO on the regional grid's +# resolution. Nevertheless, some users still prefer to use GFDLgrid_RES +# in the file names, so we allow for that here by setting this flag to +# "TRUE". +# +#----------------------------------------------------------------------- +# +GFDLgrid_LON_T6_CTR=-97.5 +GFDLgrid_LAT_T6_CTR=35.5 +GFDLgrid_RES="384" +GFDLgrid_STRETCH_FAC=1.5 +GFDLgrid_REFINE_RATIO=3 +GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=10 +GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=374 +GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=10 +GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=374 +GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES="TRUE" +# +#----------------------------------------------------------------------- +# +# Set parameters specific to the "JPgrid" method of generating a regional +# grid (i.e. for GRID_GEN_METHOD set to "JPgrid"). Definitions: +# +# JPgrid_LON_CTR: # The longitude of the center of the grid (in degrees). # -# lat_rgnl_ctr: +# JPgrid_LAT_CTR: # The latitude of the center of the grid (in degrees). # -# delx: +# JPgrid_DELX: # The cell size in the zonal direction of the regional grid (in meters). # -# dely: -# The cell size in the meridional direction of the regional grid (in me- -# ters). +# JPgrid_DELY: +# The cell size in the meridional direction of the regional grid (in +# meters). # -# nx_T7: +# JPgrid_NX: # The number of cells in the zonal direction on the regional grid. # -# ny_T7: +# JPgrid_NY: # The number of cells in the meridional direction on the regional grid. # -# nhw_T7: -# The width of the wide halo (in units of number of cells) to create -# around the regional grid. 
A grid with a halo of this width will first -# be created and stored in a grid specification file. This grid will -# then be shaved down to obtain grids with 3-cell-wide and 4-cell-wide -# halos. +# JPgrid_WIDE_HALO_WIDTH: +# The width (in units of number of grid cells) of the halo to add around +# the regional grid before shaving the halo down to the width(s) expected +# by the forecast model. # -# a_grid_param: -# The "a" parameter used in the Jim Purser map projection/grid genera- -# tion method. +# In order to generate grid files containing halos that are 3-cell and +# 4-cell wide and orography files with halos that are 0-cell and 3-cell +# wide (all of which are required as inputs to the forecast model), the +# grid and orography tasks first create files with halos around the regional +# domain of width JPgrid_WIDE_HALO_WIDTH cells. These are first stored +# in files. The files are then read in and "shaved" down to obtain grid +# files with 3-cell-wide and 4-cell-wide halos and orography files with +# 0-cell-wide (i.e. no halo) and 3-cell-wide halos. For this reason, we +# refer to the original halo that then gets shaved down as the "wide" +# halo, i.e. because it is wider than the 0-cell-wide, 3-cell-wide, and +# 4-cell-wide halos that we will eventually end up with. Note that the +# grid and orography files with the wide halo are only needed as intermediates +# in generating the files with 0-cell-, 3-cell-, and 4-cell-wide halos; +# they are not needed by the forecast model. Usually, there is no reason +# to change this parameter from its default value set here. # -# k_grid_param: -# The "k" parameter used in the Jim Purser map projection/grid genera- -# tion method. +# NOTE: Probably don't need to make this a user-specified variable. +# Just set it in the function set_gridparams_JPgrid.sh. 
# -#----------------------------------------------------------------------- +# JPgrid_ALPHA_PARAM: +# The alpha parameter used in the Jim Purser map projection/grid generation +# method. # -elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - - lon_rgnl_ctr=-97.5 - lat_rgnl_ctr=35.5 - delx="3000.0" - dely="3000.0" - nx_T7=1000 - ny_T7=1000 - nhw_T7=6 - a_grid_param="0.21423" - k_grid_param="-0.23209" - -fi +# JPgrid_KAPPA_PARAM: +# The kappa parameter used in the Jim Purser map projection/grid generation +# method. # #----------------------------------------------------------------------- # -# Set PREDEF_GRID_NAME. This variable specifies a predefined (regional) -# domain, as follows: -# -# * If PREDEF_GRID_NAME is set to an empty string, the grid configuration -# parameters set below are used to generate a grid. -# -# * If PREDEF_GRID_NAME is set to a valid non-empty string, the grid confi- -# guration parameters set below are overwritten by predefined values -# in order to generate a predefined grid. Valid non-empty values for -# PREDEF_GRID_NAME currently consist of: -# -# "RAP" -# "HRRR" -# "EMCCONUS" -# -# These result in regional grids that cover (as closely as possible) -# the domains used in the WRF/ARW-based RAP and HRRR models, respec- -# tively. +JPgrid_LON_CTR="-97.5" +JPgrid_LAT_CTR="35.5" +JPgrid_DELX="3000.0" +JPgrid_DELY="3000.0" +JPgrid_NX="1000" +JPgrid_NY="1000" +JPgrid_WIDE_HALO_WIDTH="6" +JPgrid_ALPHA_PARAM="0.21423" +JPgrid_KAPPA_PARAM="-0.23209" # #----------------------------------------------------------------------- # -PREDEF_GRID_NAME="" +# Set DT_ATMOS. This is the main forecast model integraton time step. +# As described in the forecast model documentation, "It corresponds to +# the frequency with which the top level routine in the dynamics is called +# as well as the frequency with which the physics is called." # #----------------------------------------------------------------------- # -# Set the model integraton time step dt_atmos. 
This is the time step -# for the largest atmosphere model loop. It corresponds to the frequen- -# cy with which the top level routine in the dynamics is called as well -# as the frequency with which the physics is called. +DT_ATMOS="18" # #----------------------------------------------------------------------- # -dt_atmos=18 #Preliminary values: 18 for 3-km runs, 90 for 13-km runs +# Set LAYOUT_X and LAYOUT_Y. These are the number of MPI tasks (processes) +# to use in the two horizontal directions (x and y) of the regional grid +# when running the forecast model. # #----------------------------------------------------------------------- # -# Set PREEXISTING_DIR_METHOD. This variable determines the strategy to -# use to deal with preexisting experiment and/or work directories (e.g -# ones generated by previous experiments). This variable must be set to -# one of "delete", "rename", and "quit". The resulting behavior for -# each of these values is as follows: -# -# * "delete": -# The preexisting directory is deleted and a new directory (having the -# same name as the original preexisting directory) is created. -# -# * "rename": -# The preexisting directory is renamed and a new directory (having the -# same name as the original preexisting directory) is created. The -# new name of the preexisting directory consists of its original name -# and the suffix "_oldNNN", where NNN is a 3-digit integer chosen to -# make the new name unique. -# -# * "quit": -# The preexisting directory is left unchanged, but execution of the -# currently running script is terminated. In this case, the preexist- -# ing directory must be dealt with manually before rerunning the -# script. 
-# -#----------------------------------------------------------------------- -# -PREEXISTING_DIR_METHOD="delete" -#PREEXISTING_DIR_METHOD="rename" -#PREEXISTING_DIR_METHOD="quit" -# -#----------------------------------------------------------------------- -# -# Set the flag that determines whether or not the workflow scripts tend -# to be more verbose. This must be set to "TRUE" or "FALSE". -# -#----------------------------------------------------------------------- -# -VERBOSE="TRUE" -#VERBOSE="FALSE" +LAYOUT_X="20" +LAYOUT_Y="20" # #----------------------------------------------------------------------- # -# Set the number of MPI tasks to use in the x and y directions. -# -#----------------------------------------------------------------------- -# -layout_x="20" -layout_y="20" -# -#----------------------------------------------------------------------- +# Set BLOCKSIZE. This is the amount of data that is passed into the cache +# at a time. The number of vertical columns per MPI task needs to be +# divisible by BLOCKSIZE; otherwise, unexpected results may occur. # -# Set the blocksize to use. This is the amount of data that is passed -# into the cache at a time. The number of vertical columns per MPI task -# needs to be divisible by the blocksize; otherwise, unexpected results -# may occur. +# GSK: IMPORTANT NOTE: +# I think Dom fixed the code so that the number of columns per MPI task +# no longer needs to be divisible by BLOCKSIZE. If so, remove the check +# on blocksize in the experiment generation scripts. Note that BLOCKSIZE +# still needs to be set to a value (probably machine-dependent). # #----------------------------------------------------------------------- # -blocksize="24" +BLOCKSIZE="24" # #----------------------------------------------------------------------- # # Set write-component (quilting) parameters. Definitions: # # QUILTING: -# Flag for whether or not to use the write component for output. 
+# Flag that determines whether or not to use the write component for +# writing output files to disk. # -# write_groups: +# WRTCMP_write_groups: # The number of write groups (i.e. groups of MPI tasks) to use in the # write component. # -# write_tasks_per_group: +# WRTCMP_write_tasks_per_group: # The number of MPI tasks to allocate for each write group. # -# print_esmf: +# PRINT_ESMF: # Flag for whether or not to output extra (debugging) information from # ESMF routines. Must be ".true." or ".false.". Note that the write # component uses ESMF library routines to interpolate from the native -# FV3SAR grid to the user-specified output grid (which is defined in the +# forecast model grid to the user-specified output grid (which is defined in the # model configuration file MODEL_CONFIG_FN in the forecast's run direc- # tory). # #----------------------------------------------------------------------- # QUILTING="TRUE" -print_esmf=".false." +PRINT_ESMF="FALSE" WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="20" + WRTCMP_output_grid="''" WRTCMP_cen_lon="" WRTCMP_cen_lat="" @@ -647,46 +631,147 @@ WRTCMP_dlat="" # The following are used only for the case of WRTCMP_output_grid set to # "'lambert_conformal'". # -WRTCMP_cen_lon="" -WRTCMP_cen_lat="" WRTCMP_stdlat1="" WRTCMP_stdlat2="" WRTCMP_nx="" WRTCMP_ny="" WRTCMP_dx="" WRTCMP_dy="" - - # #----------------------------------------------------------------------- # +# Set PREDEF_GRID_NAME. This parameter specifies a predefined regional +# grid, as follows: +# +# * If PREDEF_GRID_NAME is set to an empty string, the grid parameters, +# time step (DT_ATMOS), computational parameters (e.g. LAYOUT_X, LAYOUT_Y), +# and write component parameters set above (and possibly overwritten by +# values in the user-specified configuration file) are used. +# +# * If PREDEF_GRID_NAME is set to a valid grid name, the grid parameters, +# time step (DT_ATMOS), computational parameters (e.g. 
LAYOUT_X, LAYOUT_Y), +# and write component parameters set above (and possibly overwritten by +# values in the user-specified configuration file) are overwritten by +# predefined values for the specified grid. # +# This is simply a convenient way to quickly specify a set of parameters +# that depend on the grid. # #----------------------------------------------------------------------- # -RUN_TASK_MAKE_GRID="TRUE" -GRID_DIR="/path/to/pregenerated/grid/files" - -RUN_TASK_MAKE_OROG="TRUE" -OROG_DIR="/path/to/pregenerated/orog/files" +PREDEF_GRID_NAME="" # #----------------------------------------------------------------------- # +# Set EMC_GRID_NAME. This is a convenience parameter to allow EMC to use +# its original grid names. It is simply used to determine a value for +# PREDEF_GRID_NAME. Once EMC starts using PREDEF_GRID_NAME, this variable +# can be eliminated. +# +#----------------------------------------------------------------------- # +EMC_GRID_NAME="" # #----------------------------------------------------------------------- # +# Set PREEXISTING_DIR_METHOD. This variable determines the method to use +# use to deal with preexisting directories [e.g ones generated by previous +# calls to the experiment generation script using the same experiment name +# (EXPT_SUBDIR) as the current experiment]. This variable must be set to +# one of "delete", "rename", and "quit". The resulting behavior for each +# of these values is as follows: +# +# * "delete": +# The preexisting directory is deleted and a new directory (having the +# same name as the original preexisting directory) is created. +# +# * "rename": +# The preexisting directory is renamed and a new directory (having the +# same name as the original preexisting directory) is created. The new +# name of the preexisting directory consists of its original name and +# the suffix "_oldNNN", where NNN is a 3-digit integer chosen to make +# the new name unique. 
+# +# * "quit": +# The preexisting directory is left unchanged, but execution of the +# currently running script is terminated. In this case, the preexisting +# directory must be dealt with manually before rerunning the script. +# +#----------------------------------------------------------------------- +# +PREEXISTING_DIR_METHOD="delete" +# +#----------------------------------------------------------------------- +# +# Set VERBOSE. This is a flag that determines whether or not the experiment +# generation and workflow task scripts tend to be print out more informational +# messages. +# +#----------------------------------------------------------------------- +# +VERBOSE="TRUE" +# +#----------------------------------------------------------------------- +# +# Set flags (and related directories) that determine whether the grid, +# orography, and/or surface climatology file generation tasks should be +# run. Note that these are all cycle-independent tasks, i.e. if they are +# to be run, they do so only once at the beginning of the workflow before +# any cycles are run. Definitions: +# +# RUN_TASK_MAKE_GRID: +# Flag that determines whether the grid file generation task is to be run. +# If this is set to "TRUE", the grid generation task is run and new grid +# files are generated. If it is set to "FALSE", then the scripts look +# for pregenerated grid files in the directory specified by GRID_DIR (see +# below). +# +# GRID_DIR: +# The directory in which to look for pregenerated grid files if +# RUN_TASK_MAKE_GRID is set to "FALSE". +# +# RUN_TASK_MAKE_OROG: +# Same as RUN_TASK_MAKE_GRID but for the orography generation task. +# +# OROG_DIR: +# Same as GRID_DIR but for the orogrpahy generation task. +# +# RUN_TASK_MAKE_SFC_CLIMO: +# Same as RUN_TASK_MAKE_GRID but for the surface climatology generation +# task. +# +# SFC_CLIMO_DIR: +# Same as GRID_DIR but for the surface climatology generation task. 
+# +#----------------------------------------------------------------------- +# +RUN_TASK_MAKE_GRID="TRUE" +GRID_DIR="/path/to/pregenerated/grid/files" + +RUN_TASK_MAKE_OROG="TRUE" +OROG_DIR="/path/to/pregenerated/orog/files" + RUN_TASK_MAKE_SFC_CLIMO="TRUE" SFC_CLIMO_DIR="/path/to/pregenerated/surface/climo/files" - # #----------------------------------------------------------------------- # -# +# Set the arrays that specify the file names in the system and experiment's +# FIXam directories. Definitions: +# +# FIXgsm_FILENAMES: +# This array contains the names of the fixed files in the system's FIXgsm +# directory that the experiment generation script will either copy or +# create links to. +# +# FIXam_FILENAMES: +# This array contains the names of the files in the local FIXam directory +# that are either copies of or symlinks to the files listed in the +# FIXgsm_FILENAMES array in the FIXgsm directory. # #----------------------------------------------------------------------- # -FIXam_FILES_SYSDIR=( \ +FIXgsm_FILENAMES=( \ "CFSR.SEAICE.1982.2012.monthly.clim.grb" \ "RTGSST.1982.2012.monthly.clim.grb" \ "seaice_newland.grb" \ @@ -724,7 +809,7 @@ FIXam_FILES_SYSDIR=( \ ) # "global_o3prdlos.f77" \ -FIXam_FILES_EXPTDIR=( \ +FIXam_FILENAMES=( \ "CFSR.SEAICE.1982.2012.monthly.clim.grb" \ "RTGSST.1982.2012.monthly.clim.grb" \ "seaice_newland.grb" \ diff --git a/ush/count_files.sh b/ush/count_files.sh deleted file mode 100755 index 55d269082f..0000000000 --- a/ush/count_files.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash - -# -#----------------------------------------------------------------------- -# -# This function returns the number of files in the current directory -# that end with the specified extension (file_extension). -# -#----------------------------------------------------------------------- -# -. 
./source_funcs.sh - -function count_files() { - - if [ "$#" -ne 1 ]; then - print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. -Usage: - - ${FUNCNAME[0]} file_extension - -where file_extension is the file extension to use for counting files. -The file count returned will be equal to the number of files in the cur- -rent directory that end with \".${file_extension}\"." - fi - - local file_extension="$1" - local glob_pattern="*.${file_extension}" - local num_files=$( ls -1 ${glob_pattern} 2>/dev/null | wc -l ) - print_info_msg "${num_files}" - -} - - diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 53682600b5..d3dc0dcdd7 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -1,20 +1,51 @@ -#!/bin/sh -l +#!/bin/bash -l -# These need to be made machine-dependent. The following work only on -# Hera. -module load intel/19.0.4.243 -module load netcdf/4.7.0 - - -ushdir=$(pwd) # #----------------------------------------------------------------------- # -# Source function definition files. +# This file defines and then calls a function that sets up a forecast +# experiment and creates a workflow (according to the parameters speci- +# fied in the configuration file; see instructions). +# +#----------------------------------------------------------------------- +# +function generate_FV3SAR_wflow() { +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). # #----------------------------------------------------------------------- # -. 
$ushdir/source_funcs.sh +local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +local scrfunc_fn=$( basename "${scrfunc_fp}" ) +local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# +local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Set directories. +# +#----------------------------------------------------------------------- +# +ushdir="${scrfunc_dir}" +# +#----------------------------------------------------------------------- +# +# Source bash utility functions. +# +#----------------------------------------------------------------------- +# +. $ushdir/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -24,18 +55,15 @@ ushdir=$(pwd) #----------------------------------------------------------------------- # { save_shell_opts; set -u +x; } > /dev/null 2>&1 - - -script_name=$( basename "${BASH_SOURCE[0]}" ) # #----------------------------------------------------------------------- # -# Source the setup script. Note that this in turn sources the configu- -# ration file/script (config.sh) in the current directory. It also cre- -# ates the run and work directories, the INPUT and RESTART subdirecto- -# ries under the run directory, and a variable definitions file/script -# in the run directory. The latter gets sources by each of the scripts -# that run the various workflow tasks. +# Source the file that defines and then calls the setup function. 
The +# setup function in turn first sources the default configuration file +# (which contains default values for the experiment/workflow parameters) +# and then sources the user-specified configuration file (which contains +# user-specified values for a subset of the experiment/workflow parame- +# ters that override their default values). # #----------------------------------------------------------------------- # @@ -49,8 +77,8 @@ script_name=$( basename "${BASH_SOURCE[0]}" ) # #----------------------------------------------------------------------- # -TEMPLATE_XML_FP="$TEMPLATE_DIR/$WFLOW_XML_FN" -WFLOW_XML_FP="$EXPTDIR/$WFLOW_XML_FN" +TEMPLATE_XML_FP="${TEMPLATE_DIR}/${WFLOW_XML_FN}" +WFLOW_XML_FP="$EXPTDIR/${WFLOW_XML_FN}" # #----------------------------------------------------------------------- # @@ -58,7 +86,7 @@ WFLOW_XML_FP="$EXPTDIR/$WFLOW_XML_FN" # #----------------------------------------------------------------------- # -cp_vrfy $TEMPLATE_XML_FP $WFLOW_XML_FP +cp_vrfy ${TEMPLATE_XML_FP} ${WFLOW_XML_FP} # #----------------------------------------------------------------------- # @@ -67,7 +95,7 @@ cp_vrfy $TEMPLATE_XML_FP $WFLOW_XML_FP # #----------------------------------------------------------------------- # -PROC_RUN_FV3="${NUM_NODES}:ppn=${ncores_per_node}" +PROC_RUN_FCST="${NUM_NODES}:ppn=${NCORES_PER_NODE}" FHR=( $( seq 0 1 ${FCST_LEN_HRS} ) ) i=0 @@ -75,9 +103,9 @@ FHR_STR=$( printf "%02d" "${FHR[i]}" ) numel=${#FHR[@]} for i in $(seq 1 $(($numel-1)) ); do hour=$( printf "%02d" "${FHR[i]}" ) - FHR_STR="$FHR_STR $hour" + FHR_STR="${FHR_STR} $hour" done -FHR="$FHR_STR" +FHR="${FHR_STR}" # #----------------------------------------------------------------------- # @@ -89,37 +117,66 @@ FHR="$FHR_STR" # CDATE_generic="@Y@m@d@H" if [ "${RUN_ENVIR}" = "nco" ]; then - CYCLE_DIR="$STMP/tmpnwprd/${PREDEF_GRID_NAME}_${CDATE_generic}" + CYCLE_DIR="$STMP/tmpnwprd/${EMC_GRID_NAME}_${CDATE_generic}" else CYCLE_DIR="$EXPTDIR/${CDATE_generic}" fi - -set_file_param 
"$WFLOW_XML_FP" "SCRIPT_VAR_DEFNS_FP" "$SCRIPT_VAR_DEFNS_FP" -set_file_param "$WFLOW_XML_FP" "CYCLE_DIR" "${CYCLE_DIR}" -set_file_param "$WFLOW_XML_FP" "ACCOUNT" "$ACCOUNT" -set_file_param "$WFLOW_XML_FP" "SCHED" "$SCHED" -set_file_param "$WFLOW_XML_FP" "QUEUE_DEFAULT" "$QUEUE_DEFAULT" -set_file_param "$WFLOW_XML_FP" "QUEUE_HPSS" "$QUEUE_HPSS" -set_file_param "$WFLOW_XML_FP" "QUEUE_FCST" "$QUEUE_FCST" -set_file_param "$WFLOW_XML_FP" "USHDIR" "$USHDIR" -set_file_param "$WFLOW_XML_FP" "JOBSDIR" "$JOBSDIR" -set_file_param "$WFLOW_XML_FP" "EXPTDIR" "$EXPTDIR" -set_file_param "$WFLOW_XML_FP" "LOGDIR" "$LOGDIR" -set_file_param "$WFLOW_XML_FP" "EXTRN_MDL_NAME_ICS" "$EXTRN_MDL_NAME_ICS" -set_file_param "$WFLOW_XML_FP" "EXTRN_MDL_NAME_LBCS" "$EXTRN_MDL_NAME_LBCS" -set_file_param "$WFLOW_XML_FP" "EXTRN_MDL_FILES_SYSBASEDIR_ICS" "$EXTRN_MDL_FILES_SYSBASEDIR_ICS" -set_file_param "$WFLOW_XML_FP" "EXTRN_MDL_FILES_SYSBASEDIR_LBCS" "$EXTRN_MDL_FILES_SYSBASEDIR_LBCS" -set_file_param "$WFLOW_XML_FP" "PROC_RUN_FV3" "$PROC_RUN_FV3" -set_file_param "$WFLOW_XML_FP" "DATE_FIRST_CYCL" "$DATE_FIRST_CYCL" -set_file_param "$WFLOW_XML_FP" "DATE_LAST_CYCL" "$DATE_LAST_CYCL" -set_file_param "$WFLOW_XML_FP" "YYYY_FIRST_CYCL" "$YYYY_FIRST_CYCL" -set_file_param "$WFLOW_XML_FP" "MM_FIRST_CYCL" "$MM_FIRST_CYCL" -set_file_param "$WFLOW_XML_FP" "DD_FIRST_CYCL" "$DD_FIRST_CYCL" -set_file_param "$WFLOW_XML_FP" "HH_FIRST_CYCL" "$HH_FIRST_CYCL" -set_file_param "$WFLOW_XML_FP" "FHR" "$FHR" -set_file_param "$WFLOW_XML_FP" "RUN_TASK_MAKE_GRID" "$RUN_TASK_MAKE_GRID" -set_file_param "$WFLOW_XML_FP" "RUN_TASK_MAKE_OROG" "$RUN_TASK_MAKE_OROG" -set_file_param "$WFLOW_XML_FP" "RUN_TASK_MAKE_SFC_CLIMO" "$RUN_TASK_MAKE_SFC_CLIMO" +# +# Computational resource parameters. 
+# +set_file_param "${WFLOW_XML_FP}" "ACCOUNT" "$ACCOUNT" +set_file_param "${WFLOW_XML_FP}" "SCHED" "$SCHED" +set_file_param "${WFLOW_XML_FP}" "QUEUE_DEFAULT" "${QUEUE_DEFAULT}" +set_file_param "${WFLOW_XML_FP}" "QUEUE_HPSS" "${QUEUE_HPSS}" +set_file_param "${WFLOW_XML_FP}" "QUEUE_FCST" "${QUEUE_FCST}" +set_file_param "${WFLOW_XML_FP}" "PROC_RUN_FCST" "${PROC_RUN_FCST}" +# +# Directories. +# +set_file_param "${WFLOW_XML_FP}" "USHDIR" "$USHDIR" +set_file_param "${WFLOW_XML_FP}" "JOBSDIR" "$JOBSDIR" +set_file_param "${WFLOW_XML_FP}" "EXPTDIR" "$EXPTDIR" +set_file_param "${WFLOW_XML_FP}" "LOGDIR" "$LOGDIR" +set_file_param "${WFLOW_XML_FP}" "CYCLE_DIR" "${CYCLE_DIR}" +# +# Files. +# +set_file_param "${WFLOW_XML_FP}" "GLOBAL_VAR_DEFNS_FP" "${GLOBAL_VAR_DEFNS_FP}" +# +# External model information. +# +set_file_param "${WFLOW_XML_FP}" "EXTRN_MDL_NAME_ICS" "${EXTRN_MDL_NAME_ICS}" +set_file_param "${WFLOW_XML_FP}" "EXTRN_MDL_NAME_LBCS" "${EXTRN_MDL_NAME_LBCS}" +set_file_param "${WFLOW_XML_FP}" "EXTRN_MDL_FILES_SYSBASEDIR_ICS" "${EXTRN_MDL_FILES_SYSBASEDIR_ICS}" +set_file_param "${WFLOW_XML_FP}" "EXTRN_MDL_FILES_SYSBASEDIR_LBCS" "${EXTRN_MDL_FILES_SYSBASEDIR_LBCS}" +# +# Cycle-specific information. +# +set_file_param "${WFLOW_XML_FP}" "DATE_FIRST_CYCL" "${DATE_FIRST_CYCL}" +set_file_param "${WFLOW_XML_FP}" "DATE_LAST_CYCL" "${DATE_LAST_CYCL}" +set_file_param "${WFLOW_XML_FP}" "YYYY_FIRST_CYCL" "${YYYY_FIRST_CYCL}" +set_file_param "${WFLOW_XML_FP}" "MM_FIRST_CYCL" "${MM_FIRST_CYCL}" +set_file_param "${WFLOW_XML_FP}" "DD_FIRST_CYCL" "${DD_FIRST_CYCL}" +set_file_param "${WFLOW_XML_FP}" "HH_FIRST_CYCL" "${HH_FIRST_CYCL}" +set_file_param "${WFLOW_XML_FP}" "FHR" "$FHR" +# +# Rocoto workflow task names. 
+# +set_file_param "${WFLOW_XML_FP}" "MAKE_GRID_TN" "${MAKE_GRID_TN}" +set_file_param "${WFLOW_XML_FP}" "MAKE_OROG_TN" "${MAKE_OROG_TN}" +set_file_param "${WFLOW_XML_FP}" "MAKE_SFC_CLIMO_TN" "${MAKE_SFC_CLIMO_TN}" +set_file_param "${WFLOW_XML_FP}" "GET_EXTRN_ICS_TN" "${GET_EXTRN_ICS_TN}" +set_file_param "${WFLOW_XML_FP}" "GET_EXTRN_LBCS_TN" "${GET_EXTRN_LBCS_TN}" +set_file_param "${WFLOW_XML_FP}" "MAKE_ICS_TN" "${MAKE_ICS_TN}" +set_file_param "${WFLOW_XML_FP}" "MAKE_LBCS_TN" "${MAKE_LBCS_TN}" +set_file_param "${WFLOW_XML_FP}" "RUN_FCST_TN" "${RUN_FCST_TN}" +set_file_param "${WFLOW_XML_FP}" "RUN_POST_TN" "${RUN_POST_TN}" +# +# Flags that determine whether or not certain tasks are launched. +# +set_file_param "${WFLOW_XML_FP}" "RUN_TASK_MAKE_GRID" "${RUN_TASK_MAKE_GRID}" +set_file_param "${WFLOW_XML_FP}" "RUN_TASK_MAKE_OROG" "${RUN_TASK_MAKE_OROG}" +set_file_param "${WFLOW_XML_FP}" "RUN_TASK_MAKE_SFC_CLIMO" "${RUN_TASK_MAKE_SFC_CLIMO}" # #----------------------------------------------------------------------- # @@ -144,13 +201,13 @@ regex_search="(^\s*)(\&DATE_FIRST_CYCL;)(CC00)( i=0 for cycl in "${CYCL_HRS[@]}"; do regex_replace="\1${cycl}\3\4${cycl}00 \7${cycl}00\9" - crnt_line=$( sed -n -r -e "s%$regex_search%$regex_replace%p" "$WFLOW_XML_FP" ) + crnt_line=$( sed -n -r -e "s%${regex_search}%${regex_replace}%p" "${WFLOW_XML_FP}" ) if [ "$i" -eq "0" ]; then all_cycledefs="${crnt_line}" else all_cycledefs=$( printf "%s\n%s" "${all_cycledefs}" "${crnt_line}" ) fi - i=$(( $i+1 )) + i=$((i+1)) done # # Replace all actual newlines in the variable all_cycledefs with back- @@ -169,189 +226,260 @@ all_cycledefs=${all_cycledefs//&/\\\&} # # Perform the subsutitution. # -sed -i -r -e "s|${regex_search}|${all_cycledefs}|g" "$WFLOW_XML_FP" -# -#----------------------------------------------------------------------- -# -# Save the current shell options, turn off the xtrace option, load the -# rocoto module, then restore the original shell options. 
-# -#----------------------------------------------------------------------- -# -{ save_shell_opts; set +x; } > /dev/null 2>&1 -module load rocoto/1.3.1 -{ restore_shell_opts; } > /dev/null 2>&1 +sed -i -r -e "s|${regex_search}|${all_cycledefs}|g" "${WFLOW_XML_FP}" + + # #----------------------------------------------------------------------- # -# For convenience, print out the commands that needs to be issued on the -# command line in order to launch the workflow and to check its status. -# Also, print out the command that should be placed in the user's cron- -# tab in order for the workflow to be continually resubmitted. +# For select workflow tasks, create symlinks (in an appropriate subdi- +# rectory under the workflow directory tree) that point to module files +# in the various cloned external repositories. In principle, this is +# better than having hard-coded module files for tasks because the sym- +# links will always point to updated module files. However, it does re- +# quire that these module files in the external repositories be coded +# correctly, e.g. that they really be lua module files and not contain +# any shell commands (like "export SOME_VARIABLE"). # #----------------------------------------------------------------------- # -WFLOW_DB_FN="${WFLOW_XML_FN%.xml}.db" -load_rocoto_cmd="module load rocoto/1.3.1" -rocotorun_cmd="rocotorun -w ${WFLOW_XML_FN} -d ${WFLOW_DB_FN} -v 10" -rocotostat_cmd="rocotostat -w ${WFLOW_XML_FN} -d ${WFLOW_DB_FN} -v 10" - -print_info_msg "\ -======================================================================== -======================================================================== - -Workflow generation completed. - -======================================================================== -======================================================================== - -The experiment directory is: - - > EXPTDIR=\"$EXPTDIR\" - -To launch the workflow, first ensure that you have a compatible version -of rocoto loaded. 
For example, on theia, the following version has been -tested and works: - - > ${load_rocoto_cmd} - -(Later versions may also work but have not been tested.) To launch the -workflow, change location to the experiment directory (EXPTDIR) and is- -sue the rocotrun command, as follows: - - > cd $EXPTDIR - > ${rocotorun_cmd} - -To check on the status of the workflow, issue the rocotostat command -(also from the experiment directory): - - > ${rocotostat_cmd} - -Note that: - -1) The rocotorun command must be issued after the completion of each - task in the workflow in order for the workflow to submit the next - task(s) to the queue. - -2) In order for the output of the rocotostat command to be up-to-date, - the rocotorun command must be issued immediately before the rocoto- - stat command. - -For automatic resubmission of the workflow (say every 3 minutes), the -following line can be added to the user's crontab (use \"crontab -e\" to -edit the cron table): - -*/3 * * * * cd $EXPTDIR && $rocotorun_cmd - -Done. -" +machine=${MACHINE,,} +cd_vrfy "${MODULES_DIR}/tasks/$machine" +# +# The "module" file (really a shell script) for orog in the UFS_UTILS +# repo uses a shell variable named MOD_PATH, but it is not clear where +# that is defined. That needs to be fixed. Until then, we have to use +# a hard-coded module file, which may or may not be compatible with the +# modules used in the UFS_UTILS repo to build the orog code. 
+#ln_vrfy -fs "${UFS_UTILS_DIR}/modulefiles/fv3gfs/orog.$machine" \ +# "${MAKE_OROG_TN}" +ln_vrfy -fs "${MAKE_OROG_TN}.hardcoded" "${MAKE_OROG_TN}" +ln_vrfy -fs "${UFS_UTILS_DIR}/modulefiles/modulefile.sfc_climo_gen.$machine" \ + "${MAKE_SFC_CLIMO_TN}" +#ln_vrfy -fs "${CHGRES_DIR}/modulefiles/chgres_cube.$machine" \ +# "${MAKE_ICS_TN}" +#ln_vrfy -fs "${MAKE_ICS_TN}.hardcoded" "${MAKE_ICS_TN}" +cp_vrfy "${CHGRES_DIR}/modulefiles/chgres_cube.$machine" \ + "${MAKE_ICS_TN}" +cat "${MAKE_ICS_TN}.local" >> "${MAKE_ICS_TN}" +#ln_vrfy -fs "${CHGRES_DIR}/modulefiles/chgres_cube.$machine" \ +# "${MAKE_LBCS_TN}" +#ln_vrfy -fs "${MAKE_LBCS_TN}.hardcoded" "${MAKE_LBCS_TN}" +cp_vrfy "${CHGRES_DIR}/modulefiles/chgres_cube.$machine" \ + "${MAKE_LBCS_TN}" +cat "${MAKE_LBCS_TN}.local" >> "${MAKE_LBCS_TN}" +ln_vrfy -fs "${UFS_WTHR_MDL_DIR}/NEMS/src/conf/modules.nems" \ + "${RUN_FCST_TN}" +cd_vrfy - # #----------------------------------------------------------------------- # +# Make sure that the correct ozone production/loss fixed file is speci- +# fied in the array FIXgsm_FILENAMES. There should be two such files +# on disk in the system directory specified in FIXgsm. They are named +# +# ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77 +# +# and +# +# global_o3prdlos.f77 # +# The first should be used with the 2015 ozone parameterization, while +# the second should be used with the more recent ozone parameterization +# (referred to here as the after-2015 parameterization). +# +# Which of these should be used depends on the specified physics suite +# (CCPP_PHYS_SUITE). The GFS physics suite uses the after-2015 parame- +# terization, while the GSD physics suite uses the 2015 parameteriza- +# tion. Thus, we must ensure that the ozone production/loss fixed file +# listed in the array FIXgsm_FILENAMES is the correct one for the gi- +# ven physics suite. We do this below as follows. 
+# +# First, note that FIXgsm_FILENAMES should contain the name of exactly +# one of the ozone production/loss fixed files listed above. We verify +# this by trying to obtain the indices of the elements of FIXam_FILES_- +# SYSDIR that contain the two files. One of these indices should not +# exist while the other one should. If the 2015 file is the one that is +# found in FIXgsm_FILENAMES, then if we're using GFS physics, we +# change that element in FIXgsm_FILENAMES to the name of the after- +# 2015 file. Similarly, if the after-2015 file is the one that is found +# in FIXgsm_FILENAMES, then if we're using GSD physics, we change that +# element in FIXgsm_FILENAMES to the name of the 2015 file. If +# neither file or more than one ozone production/loss file is found in +# FIXgsm_FILENAMES, we print out an error message and exit. # #----------------------------------------------------------------------- # -if [ "${RUN_ENVIR}" = "nco" ]; then +ozphys_2015_fn="ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77" +indx_ozphys_2015=$( get_elem_inds "FIXgsm_FILENAMES" "${ozphys_2015_fn}" ) +read -a indx_ozphys_2015 <<< ${indx_ozphys_2015} +num_files_ozphys_2015=${#indx_ozphys_2015[@]} - glob_pattern="C*_mosaic.nc" - cd_vrfy $FIXsar - num_files=$( ls -1 ${glob_pattern} 2>/dev/null | wc -l ) - - if [ "${num_files}" -ne "1" ]; then - print_err_msg_exit "${script_name}" "\ -Exactly one file must exist in directory FIXsar matching the globbing -pattern glob_pattern: - FIXsar = \"${FIXsar}\" - glob_pattern = \"${glob_pattern}\" - num_files = \"${num_files}\"" - fi +ozphys_after2015_fn="global_o3prdlos.f77" +indx_ozphys_after2015=$( get_elem_inds "FIXgsm_FILENAMES" "${ozphys_after2015_fn}" ) +read -a indx_ozphys_after2015 <<< ${indx_ozphys_after2015} +num_files_ozphys_after2015=${#indx_ozphys_after2015[@]} - fn=$( ls -1 ${glob_pattern} ) - RES=$( printf "%s" $fn | sed -n -r -e "s/^C([0-9]*)_mosaic.nc/\1/p" ) - CRES="C$RES" -echo "RES = $RES" +if [ ${num_files_ozphys_2015} -eq 1 ] && \ + 
[ ${num_files_ozphys_after2015} -eq 0 ]; then -# RES_equiv=$( ncdump -h "${grid_fn}" | grep -o ":RES_equiv = [0-9]\+" | grep -o "[0-9]") -# RES_equiv=${RES_equiv//$'\n'/} -#printf "%s\n" "RES_equiv = $RES_equiv" -# CRES_equiv="C${RES_equiv}" -#printf "%s\n" "CRES_equiv = $CRES_equiv" -# -# RES="$RES_equiv" -# CRES="$CRES_equiv" + if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then + FIXgsm_FILENAMES[${indx_ozphys_2015}]="${ozphys_after2015_fn}" + fi - set_file_param "${SCRIPT_VAR_DEFNS_FP}" "RES" "${RES}" - set_file_param "${SCRIPT_VAR_DEFNS_FP}" "CRES" "${CRES}" +elif [ ${num_files_ozphys_2015} -eq 0 ] && \ + [ ${num_files_ozphys_after2015} -eq 1 ]; then + + if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then + FIXgsm_FILENAMES[${indx_ozphys_after2015}]="${ozphys_2015_fn}" + fi else + + FIXgsm_FILENAMES_str=$( printf "\"%s\"\n" "${FIXgsm_FILENAMES[@]}" ) + print_err_msg_exit "\ +The array FIXgsm_FILENAMES containing the names of the fixed files in +the system directory (FIXgsm) to copy or link to has been specified in- +correctly because it contains no or more than one occurrence of the +ozone production/loss file(s) (whose names are specified in the varia- +bles ozphys_2015_fn and ozphys_after2015_fn): + FIXgsm = \"${FIXgsm}\" + ozphys_2015_fn = \"${ozphys_2015_fn}\" + num_files_ozphys_2015_fn = \"${num_files_ozphys_2015_fn}\" + ozphys_after2015_fn = \"${ozphys_after2015_fn}\" + num_files_ozphys_after2015_fn = \"${num_files_ozphys_after2015_fn}\" + FIXgsm_FILENAMES = +( +${FIXgsm_FILENAMES_str} +) +Please check the contents of the FIXgsm_FILENAMES array and rerun." + +fi # #----------------------------------------------------------------------- # -# If the grid file generation task in the workflow is going to be -# skipped (because pregenerated files are available), create links in -# the FIXsar directory to the pregenerated grid files. +# Copy the workflow (re)launch script to the experiment directory. 
# #----------------------------------------------------------------------- # - if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then - $USHDIR/link_fix.sh \ - verbose="FALSE" \ - script_var_defns_fp="${SCRIPT_VAR_DEFNS_FP}" \ - file_group="grid" || \ - print_err_msg_exit "\ -Call to script to create links to grid files failed. -" - fi +print_info_msg " +Creating symlink in the experiment directory (EXPTDIR) to the workflow +launch script (WFLOW_LAUNCH_SCRIPT_FP): + EXPTDIR = \"${EXPTDIR}\" + WFLOW_LAUNCH_SCRIPT_FP = \"${WFLOW_LAUNCH_SCRIPT_FP}\"" +ln_vrfy -fs "${WFLOW_LAUNCH_SCRIPT_FP}" "$EXPTDIR" # #----------------------------------------------------------------------- # -# If the orography file generation task in the workflow is going to be -# skipped (because pregenerated files are available), create links in -# the FIXsar directory to the pregenerated orography files. +# If USE_CRON_TO_RELAUNCH is set to TRUE, add a line to the user's cron +# table to call the (re)launch script every CRON_RELAUNCH_INTVL_MNTS mi- +# nutes. # #----------------------------------------------------------------------- # - if [ "${RUN_TASK_MAKE_OROG}" = "FALSE" ]; then - $USHDIR/link_fix.sh \ - verbose="FALSE" \ - script_var_defns_fp="${SCRIPT_VAR_DEFNS_FP}" \ - file_group="orog" || \ - print_err_msg_exit "\ -Call to script to create links to orography files failed. -" - fi +if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then # -#----------------------------------------------------------------------- +# Make a backup copy of the user's crontab file and save it in a file. # -# If the surface climatology file generation task in the workflow is -# going to be skipped (because pregenerated files are available), create -# links in the FIXsar directory to the pregenerated surface climatology -# files. 
+ time_stamp=$( date "+%F_%T" ) + crontab_backup_fp="$EXPTDIR/crontab.bak.${time_stamp}" + print_info_msg " +Copying contents of user cron table to backup file: + crontab_backup_fp = \"${crontab_backup_fp}\"" + crontab -l > ${crontab_backup_fp} # -#----------------------------------------------------------------------- +# Below, we use "grep" to determine whether the crontab line that the +# variable CRONTAB_LINE contains is already present in the cron table. +# For that purpose, we need to escape the asterisks in the string in +# CRONTAB_LINE with backslashes. Do this next. # - if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" ]; then - $USHDIR/link_fix.sh \ - verbose="FALSE" \ - script_var_defns_fp="${SCRIPT_VAR_DEFNS_FP}" \ - file_group="sfc_climo" || \ - print_err_msg_exit "\ -Call to script to create links to surface climatology files failed. -" - fi + crontab_line_esc_astr=$( printf "%s" "${CRONTAB_LINE}" | \ + sed -r -e "s%[*]%\\\\*%g" ) +# +# In the grep command below, the "^" at the beginning of the string be- +# ing passed to grep is a start-of-line anchor while the "$" at the end +# of the string is an end-of-line anchor. Thus, in order for grep to +# find a match on any given line of the output of "crontab -l", that +# line must contain exactly the string in the variable crontab_line_- +# esc_astr without any leading or trailing characters. This is to eli- +# minate situations in which a line in the output of "crontab -l" con- +# tains the string in crontab_line_esc_astr but is precedeeded, for ex- +# ample, by the comment character "#" (in which case cron ignores that +# line) and/or is followed by further commands that are not part of the +# string in crontab_line_esc_astr (in which case it does something more +# than the command portion of the string in crontab_line_esc_astr does). +# + grep_output=$( crontab -l | grep "^${crontab_line_esc_astr}$" ) + exit_status=$? 
-fi + if [ "${exit_status}" -eq 0 ]; then + + print_info_msg " +The following line already exists in the cron table and thus will not be +added: + CRONTAB_LINE = \"${CRONTAB_LINE}\"" + + else + + print_info_msg " +Adding the following line to the cron table in order to automatically +resubmit FV3SAR workflow: + CRONTAB_LINE = \"${CRONTAB_LINE}\"" + ( crontab -l; echo "${CRONTAB_LINE}" ) | crontab - + fi +fi +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# +## Is this if-statement still necessary? +#if [ "${RUN_ENVIR}" = "nco" ]; then +# +# glob_pattern="C*_mosaic.nc" +# cd_vrfy $FIXsar +# num_files=$( ls -1 ${glob_pattern} 2>/dev/null | wc -l ) +# +# if [ "${num_files}" -ne "1" ]; then +# print_err_msg_exit "\ +#Exactly one file must exist in directory FIXsar matching the globbing +#pattern glob_pattern: +# FIXsar = \"${FIXsar}\" +# glob_pattern = \"${glob_pattern}\" +# num_files = ${num_files}" +# fi +# +# fn=$( ls -1 ${glob_pattern} ) +# RES=$( printf "%s" $fn | sed -n -r -e "s/^C([0-9]*)_mosaic.nc/\1/p" ) +# CRES="C$RES" +#echo "RES = $RES" +# +## RES_equiv=$( ncdump -h "${grid_fn}" | grep -o ":RES_equiv = [0-9]\+" | grep -o "[0-9]") +## RES_equiv=${RES_equiv//$'\n'/} +##printf "%s\n" "RES_equiv = $RES_equiv" +## CRES_equiv="C${RES_equiv}" +##printf "%s\n" "CRES_equiv = $CRES_equiv" +## +## RES="$RES_equiv" +## CRES="$CRES_equiv" +# +# set_file_param "${GLOBAL_VAR_DEFNS_FP}" "RES" "${RES}" +# set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "${CRES}" +# +#fi # #----------------------------------------------------------------------- # @@ -362,20 +490,20 @@ fi #----------------------------------------------------------------------- # -# For nco, we assume the following copy operation is done beforehand, but -# that can be changed. +# In NCO mode, we assume the following copy operation is done beforehand, +# but that can be changed. 
if [ "${RUN_ENVIR}" != "nco" ]; then - print_info_msg_verbose "\ -Copying fixed files from system directory to the workflow directory..." + print_info_msg "$VERBOSE" " +Copying fixed files from system directory to the experiment directory..." check_for_preexist_dir $FIXam "delete" mkdir -p $FIXam cp_vrfy $FIXgsm/global_hyblev.l65.txt $FIXam for (( i=0; i<${NUM_FIXam_FILES}; i++ )); do - cp_vrfy $FIXgsm/${FIXam_FILES_SYSDIR[$i]} \ - $FIXam/${FIXam_FILES_EXPTDIR[$i]} + cp_vrfy $FIXgsm/${FIXgsm_FILENAMES[$i]} \ + $FIXam/${FIXam_FILENAMES[$i]} done fi @@ -386,257 +514,304 @@ fi # #----------------------------------------------------------------------- # -print_info_msg_verbose " +print_info_msg "$VERBOSE" " Copying templates of various input files to the experiment directory..." + +print_info_msg "$VERBOSE" " + Copying the template data table file to the experiment directory..." +cp_vrfy "${DATA_TABLE_TMPL_FP}" "${DATA_TABLE_FP}" + +print_info_msg "$VERBOSE" " + Copying the template field table file to the experiment directory..." +cp_vrfy "${FIELD_TABLE_TMPL_FP}" "${FIELD_TABLE_FP}" + +print_info_msg "$VERBOSE" " + Copying the template FV3 namelist file to the experiment directory..." +cp_vrfy "${FV3_NML_TMPL_FP}" "${FV3_NML_FP}" + +print_info_msg "$VERBOSE" " + Copying the template NEMS configuration file to the experiment direct- + ory..." +cp_vrfy "${NEMS_CONFIG_TMPL_FP}" "${NEMS_CONFIG_FP}" # -#----------------------------------------------------------------------- -# -# If using CCPP... +# If using CCPP ... # -# If USE_CCPP is set to "TRUE", copy the appropriate modulefile, the -# CCPP physics suite definition file (an XML file), and possibly other -# suite-dependent files to the experiment directory. +if [ "${USE_CCPP}" = "TRUE" ]; then # -# The modulefile modules.nems in the directory +# Copy the CCPP physics suite definition file from its location in the +# clone of the FV3 code repository to the experiment directory (EXPT- +# DIR). 
# -# $NEMSfv3gfs_DIR/NEMS/src/conf + print_info_msg "$VERBOSE" " +Copying the CCPP physics suite definition XML file from its location in +the forecast model directory structure to the experiment directory..." + cp_vrfy "${CCPP_PHYS_SUITE_IN_CCPP_FP}" "${CCPP_PHYS_SUITE_FP}" # -# is generated during the FV3 build process and this is configured pro- -# perly for the machine, shell environment, etc. Thus, we can just copy -# it to the experiment directory without worrying about what machine -# we're on, but this still needs to be confirmed. +# If using the GSD_v0 or GSD_SAR physics suite, copy the fixed file con- +# taining cloud condensation nuclei (CCN) data that is needed by the +# Thompson microphysics parameterization to the experiment directory. # -# Note that a modulefile is a file whose first line is the "magic coo- -# kie" '#%Module'. It is interpreted by the "module load ..." command. -# It sets environment variables (including prepending/appending to -# paths) and loads modules. + if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then + print_info_msg "$VERBOSE" " +Copying the fixed file containing cloud condensation nuclei (CCN) data +(needed by the Thompson microphysics parameterization) to the experiment +directory..." + cp_vrfy "$FIXgsd/CCN_ACTIVATE.BIN" "$EXPTDIR" + fi + +fi # -# QUESTION: -# Why don't we do this for the non-CCPP version of FV3? # -# ANSWER: -# Because for that case, we load different versions of intel and impi -# (compare modules.nems to the modules loaded for the case of USE_CCPP -# set to "FALSE" in run_FV3SAR.sh). Maybe these can be combined at some -# point. Note that a modules.nems file is generated in the same rela- -# tive location in the non-CCPP-enabled version of NEMSfv3gfs, so maybe -# that can be used and the run_FV3SAR.sh script modified to accomodate 
That way the below can be performed for both the CCPP- -# enabled and non-CCPP-enabled versions of NEMSfv3gfs. +# Set parameters in the FV3SAR namelist file. # #----------------------------------------------------------------------- # -if [ "${USE_CCPP}" = "TRUE" ]; then -# -# Copy the shell script that initializes the Lmod (Lua-based module) -# system/software for handling modules. This script: +print_info_msg "$VERBOSE" " +Setting parameters in FV3 namelist file (FV3_NML_FP): + FV3_NML_FP = \"${FV3_NML_FP}\"" # -# 1) Detects the shell in which it is being invoked (i.e. the shell of -# the "parent" script in which it is being sourced). -# 2) Detects the machine it is running on and and calls the appropriate -# (shell- and machine-dependent) initalization script to initialize -# Lmod. -# 3) Purges all modules. -# 4) Uses the "module use ..." command to prepend or append paths to -# Lmod's search path (MODULEPATH). +# Set npx and npy, which are just NX plus 1 and NY plus 1, respectively. +# These need to be set in the FV3SAR Fortran namelist file. They repre- +# sent the number of cell vertices in the x and y directions on the re- +# gional grid. # - print_info_msg_verbose " -Copying the shell script that initializes the Lmod (Lua-based module) -system/software for handling modules..." +npx=$((NX+1)) +npy=$((NY+1)) # -# The following might have to be made shell-dependent, e.g. if using csh -# or tcsh, copy over the file module-setup.csh.inc??. +# Set parameters. # -# It may be convenient to also copy over this script when running the -# non-CCPP version of the FV3SAR and try to simplify the run script -# (run_FV3SAR.sh) so that it doesn't depend on whether USE_CCPP is set -# to "TRUE" or "FALSE". We can do that, but currently the non-CCPP and -# CCPP-enabled versions of the FV3SAR code use different versions of -# intel and impi, so module-setup.sh must account for this. 
+set_file_param "${FV3_NML_FP}" "blocksize" "$BLOCKSIZE" +set_file_param "${FV3_NML_FP}" "ccpp_suite" "\'${CCPP_PHYS_SUITE}\'" +set_file_param "${FV3_NML_FP}" "layout" "${LAYOUT_X},${LAYOUT_Y}" +set_file_param "${FV3_NML_FP}" "npx" "$npx" +set_file_param "${FV3_NML_FP}" "npy" "$npy" + +set_file_param "${FV3_NML_FP}" "target_lon" "${LON_CTR}" +set_file_param "${FV3_NML_FP}" "target_lat" "${LAT_CTR}" +# Question: +# For a JPgrid type grid, what should stretch_fac be set to? This de- +# pends on how the FV3 code uses the stretch_fac parameter in the name- +# list file. Recall that for a JPgrid, it gets set in the function +# set_gridparams_JPgrid(.sh) to something like 0.9999, but is it ok to +# set it to that here in the FV3 namelist file? +set_file_param "${FV3_NML_FP}" "stretch_fac" "${STRETCH_FAC}" +set_file_param "${FV3_NML_FP}" "bc_update_interval" "${LBC_UPDATE_INTVL_HRS}" # - cp_vrfy ${NEMSfv3gfs_DIR}/NEMS/src/conf/module-setup.sh.inc \ - $EXPTDIR/module-setup.sh +# For the GSD_v0 and the GSD_SAR physics suites, set the parameter lsoil +# according to the external models used to obtain ICs and LBCs. 
+# +if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then + + if [ "${EXTRN_MDL_NAME_ICS}" = "GSMGFS" -o \ + "${EXTRN_MDL_NAME_ICS}" = "FV3GFS" ] && \ + [ "${EXTRN_MDL_NAME_LBCS}" = "GSMGFS" -o \ + "${EXTRN_MDL_NAME_LBCS}" = "FV3GFS" ]; then + set_file_param "${FV3_NML_FP}" "lsoil" "4" + elif [ "${EXTRN_MDL_NAME_ICS}" = "RAPX" -o \ + "${EXTRN_MDL_NAME_ICS}" = "HRRRX" ] && \ + [ "${EXTRN_MDL_NAME_LBCS}" = "RAPX" -o \ + "${EXTRN_MDL_NAME_LBCS}" = "HRRRX" ]; then + set_file_param "${FV3_NML_FP}" "lsoil" "9" + else + print_err_msg_exit "\ +The value to set the variable lsoil to in the FV3 namelist file (FV3_- +NML_FP) has not been specified for the following combination of physics +suite and external models for ICs and LBCs: + CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\" + EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" + EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" +Please change one or more of these parameters or provide a value for +lsoil (and change workflow generation script(s) accordingly) and rerun." + fi + +fi # -# Append the command that adds the path to the CCPP libraries (via the -# shell variable LD_LIBRARY_PATH) to the Lmod initialization script in -# the experiment directory. This is needed if running the dynamic build -# of the CCPP-enabled version of the FV3SAR. +#----------------------------------------------------------------------- # - { cat << EOM >> $EXPTDIR/module-setup.sh +# To have a record of how this experiment/workflow was generated, copy +# the experiment/workflow configuration file to the experiment directo- +# ry. 
# -# Add path to libccpp.so and libccpphys.so to LD_LIBRARY_PATH" +#----------------------------------------------------------------------- # -export LD_LIBRARY_PATH="${NEMSfv3gfs_DIR}/ccpp/lib\${LD_LIBRARY_PATH:+:\$LD_LIBRARY_PATH}" -EOM -} || print_err_msg_exit "${script_name}" " -Heredoc (cat) command to append command to add path to CCPP libraries to -the Lmod initialization script in the experiment directory returned with -a nonzero status." - - print_info_msg_verbose " -Copying the modulefile required for running the CCPP-enabled version of -the FV3SAR under NEMS to the experiment directory..." - cp_vrfy ${NEMSfv3gfs_DIR}/NEMS/src/conf/modules.nems $EXPTDIR/modules.fv3 - +cp_vrfy $USHDIR/${EXPT_CONFIG_FN} $EXPTDIR # #----------------------------------------------------------------------- # -# If using CCPP with the GFS physics suite... +# For convenience, print out the commands that need to be issued on the +# command line in order to launch the workflow and to check its status. +# Also, print out the command that should be placed in the user's cron- +# tab in order for the workflow to be continually resubmitted. # #----------------------------------------------------------------------- # - if [ "${CCPP_PHYS_SUITE}" = "GFS" ]; then +wflow_db_fn="${WFLOW_XML_FN%.xml}.db" +rocotorun_cmd="rocotorun -w ${WFLOW_XML_FN} -d ${wflow_db_fn} -v 10" +rocotostat_cmd="rocotostat -w ${WFLOW_XML_FN} -d ${wflow_db_fn} -v 10" - print_info_msg_verbose " -Copying the FV3 namelist file for the GFS physics suite to the experi- -ment directory..." 
-# cp_vrfy ${TEMPLATE_DIR}/${FV3_NML_CCPP_GFS_FN} \ -# $EXPTDIR/${FV3_NML_FN} - if [ "${EXTRN_MDL_NAME_ICS}" = "FV3GFS" ]; then - cp_vrfy ${TEMPLATE_DIR}/${FV3_NML_CCPP_GFSEXTERN_GFSPHYS_FN} \ - $EXPTDIR/${FV3_NML_FN} - fi +print_info_msg " +======================================================================== +======================================================================== - print_info_msg_verbose " -Copying the field table file for the GFS physics suite to the experiment -directory..." - cp_vrfy ${TEMPLATE_DIR}/${FIELD_TABLE_FN} \ - $EXPTDIR +Workflow generation completed. - print_info_msg_verbose " -Copying the CCPP XML file for the GFS physics suite to the experiment -directory..." - cp_vrfy ${NEMSfv3gfs_DIR}/ccpp/suites/suite_FV3_GFS_2017_gfdlmp.xml \ - $EXPTDIR/suite_FV3_GFS_2017_gfdlmp.xml +======================================================================== +======================================================================== + +The experiment directory is: + + > EXPTDIR=\"$EXPTDIR\" + +To launch the workflow, first ensure that you have a compatible version +of rocoto loaded. For example, to load version 1.3.1 of rocoto, use + + > module load rocoto/1.3.1 + +(This version has been tested on hera; later versions may also work but +have not been tested.) To launch the workflow, change location to the +experiment directory (EXPTDIR) and issue the rocotorun command, as fol- +lows: + + > cd $EXPTDIR + > ${rocotorun_cmd} + +To check on the status of the workflow, issue the rocotostat command +(also from the experiment directory): + + > ${rocotostat_cmd} + +Note that: + +1) The rocotorun command must be issued after the completion of each + task in the workflow in order for the workflow to submit the next + task(s) to the queue. + +2) In order for the output of the rocotostat command to be up-to-date, + the rocotorun command must be issued immediately before the rocoto- + stat command. 
+ +For automatic resubmission of the workflow (say every 3 minutes), the +following line can be added to the user's crontab (use \"crontab -e\" to +edit the cron table): + +*/3 * * * * cd $EXPTDIR && ${rocotorun_cmd} + +Done. +" # #----------------------------------------------------------------------- # -# If using CCPP with the GSD physics suite... +# Restore the shell options saved at the beginning of this script/func- +# tion. # #----------------------------------------------------------------------- # - elif [ "${CCPP_PHYS_SUITE}" = "GSD" ]; then - - print_info_msg_verbose " -Copying the FV3 namelist file for the GSD physics suite to the experi- -ment directory..." -# cp_vrfy ${TEMPLATE_DIR}/${FV3_NML_CCPP_GSD_FN} \ -# $EXPTDIR/${FV3_NML_FN} - - if [ "${EXTRN_MDL_NAME_ICS}" = "FV3GFS" ]; then - cp_vrfy ${TEMPLATE_DIR}/${FV3_NML_CCPP_GFSEXTERN_GSDPHYS_FN} \ - $EXPTDIR/${FV3_NML_FN} - elif [ "${EXTRN_MDL_NAME_ICS}" = "HRRRX" ]; then - cp_vrfy ${TEMPLATE_DIR}/${FV3_NML_CCPP_RAPHRRREXTERN_GSDPHYS_FN} \ - $EXPTDIR/${FV3_NML_FN} - fi +{ restore_shell_opts; } > /dev/null 2>&1 - print_info_msg_verbose " -Copying the field table file for the GSD physics suite to the experiment -directory..." - cp_vrfy ${TEMPLATE_DIR}/${FIELD_TABLE_CCPP_GSD_FN} \ - $EXPTDIR/${FIELD_TABLE_FN} +} - print_info_msg_verbose " -Copying the CCPP XML file for the GSD physics suite to the experiment -directory..." - cp_vrfy ${NEMSfv3gfs_DIR}/ccpp/suites/suite_FV3_GSD_v0.xml \ - $EXPTDIR/suite_FV3_GSD_v0.xml - print_info_msg_verbose " -Copying the CCN fixed file needed by Thompson microphysics (part of the -GSD suite) to the experiment directory..." 
- cp_vrfy $FIXgsd/CCN_ACTIVATE.BIN $EXPTDIR - fi -# Original changes in stage_static.sh by Jeff: -# if [ "${CCPP_PHYS_SUITE}" = "GFS" ] && [ "$EXTRN_MDL_NAME_ICS" = "FV3GFS" ]; then # -# cp_vrfy $TEMPLATE_DIR/$FV3_NML_CCPP_GFSEXTERN_GFSPHYS_FN $EXPTDIR/$FV3_NML_FN -# cp_vrfy $TEMPLATE_DIR/$FIELD_TABLE_FN $EXPTDIR +#----------------------------------------------------------------------- # -# elif [ "${CCPP_PHYS_SUITE}" = "GSD" ] && [ "$EXTRN_MDL_NAME_ICS" = "FV3GFS" ]; then +# Start of the script that will call the experiment/workflow generation +# function defined above. # -# cp_vrfy $TEMPLATE_DIR/$FV3_NML_CCPP_GFSEXTERN_GSDPHYS_FN $EXPTDIR/$FV3_NML_FN -# cp_vrfy $TEMPLATE_DIR/$FIELD_TABLE_CCPP_GSD_FN $EXPTDIR/$FIELD_TABLE_FN +#----------------------------------------------------------------------- # -# elif [ "${CCPP_PHYS_SUITE}" = "GSD" ] && [ "$EXTRN_MDL_NAME_ICS" = "HRRRX" ]; then +set -u +#set -x # -# cp_vrfy $TEMPLATE_DIR/$FV3_NML_CCPP_RAPHRRREXTERN_GSDPHYS_FN $EXPTDIR/$FV3_NML_FN -# cp_vrfy $TEMPLATE_DIR/$FIELD_TABLE_CCPP_GSD_FN $EXPTDIR/$FIELD_TABLE_FN +#----------------------------------------------------------------------- # -# fi +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). # #----------------------------------------------------------------------- # -# If not using CCPP... +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # -elif [ "${USE_CCPP}" = "FALSE" ]; then - - cp_vrfy ${TEMPLATE_DIR}/${FV3_NML_FN} $EXPTDIR - cp_vrfy ${TEMPLATE_DIR}/${FIELD_TABLE_FN} $EXPTDIR - -fi - -cp_vrfy ${TEMPLATE_DIR}/${DATA_TABLE_FN} $EXPTDIR -cp_vrfy ${TEMPLATE_DIR}/${NEMS_CONFIG_FN} $EXPTDIR +# Set directories. 
# #----------------------------------------------------------------------- # -# Set the full path to the FV3SAR namelist file. Then set parameters in -# that file. +ushdir="${scrfunc_dir}" # -#----------------------------------------------------------------------- +# Set the name of and full path to the temporary file in which we will +# save some experiment/workflow variables. The need for this temporary +# file is explained below. # -FV3_NML_FP="$EXPTDIR/${FV3_NML_FN}" - -print_info_msg_verbose " -Setting parameters in FV3 namelist file (FV3_NML_FP): - FV3_NML_FP = \"${FV3_NML_FP}\"" +tmp_fn="tmp" +tmp_fp="$ushdir/${tmp_fn}" +rm -f "${tmp_fp}" # -# Set npx_T7 and npy_T7, which are just nx_T7 plus 1 and ny_T7 plus 1, -# respectively. These need to be set in the FV3SAR Fortran namelist -# file. They represent the number of cell vertices in the x and y di- -# rections on the regional grid (tile 7). +# Set the name of and full path to the log file in which the output from +# the experiment/workflow generation function will be saved. # -npx_T7=$(($nx_T7+1)) -npy_T7=$(($ny_T7+1)) +log_fn="log.generate_FV3SAR_wflow" +log_fp="$ushdir/${log_fn}" +rm -f "${log_fp}" # -# Set parameters. +# Call the generate_FV3SAR_wflow function defined above to generate the +# experiment/workflow. Note that we pipe the output of the function +# (and possibly other commands) to the "tee" command in order to be able +# to both save it to a file and print it out to the screen (stdout). +# The piping causes the call to the function (and the other commands +# grouped with it using the curly braces, { ... }) to be executed in a +# subshell. As a result, the experiment/workflow variables that the +# function sets are not available outside of the grouping, i.e. they are +# not available at and after the call to "tee". Since some of these va- +# riables are needed after the call to "tee" below, we save them in a +# temporary file and read them in outside the subshell later below. 
# -set_file_param "${FV3_NML_FP}" "blocksize" "$blocksize" -set_file_param "${FV3_NML_FP}" "layout" "${layout_x},${layout_y}" -set_file_param "${FV3_NML_FP}" "npx" "${npx_T7}" -set_file_param "${FV3_NML_FP}" "npy" "${npy_T7}" - -if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then -# Question: -# For a regional grid (i.e. one that only has a tile 7) should the co- -# ordinates that target_lon and target_lat get set to be those of the -# center of tile 6 (of the parent grid) or those of tile 7? These two -# are not necessarily the same [although assuming there is only one re- -# gional domain within tile 6, i.e. assuming there is no tile 8, 9, etc, -# there is no reason not to center tile 7 with respect to tile 6]. - set_file_param "${FV3_NML_FP}" "target_lon" "${lon_ctr_T6}" - set_file_param "${FV3_NML_FP}" "target_lat" "${lat_ctr_T6}" -elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - set_file_param "${FV3_NML_FP}" "target_lon" "${lon_rgnl_ctr}" - set_file_param "${FV3_NML_FP}" "target_lat" "${lat_rgnl_ctr}" -fi -set_file_param "${FV3_NML_FP}" "stretch_fac" "${stretch_fac}" -set_file_param "${FV3_NML_FP}" "bc_update_interval" "${LBC_UPDATE_INTVL_HRS}" +{ +generate_FV3SAR_wflow 2>&1 # If this exits with an error, the whole {...} group quits, so things don't work... +retval=$? +echo "$EXPTDIR" >> "${tmp_fp}" +echo "$retval" >> "${tmp_fp}" +} | tee "${log_fp}" # -#----------------------------------------------------------------------- +# Read in experiment/workflow variables needed later below from the tem- +# porary file created in the subshell above containing the call to the +# generate_FV3SAR_wflow function. These variables are not directly +# available here because the call to generate_FV3SAR_wflow above takes +# place in a subshell (due to the fact that we are then piping its out- +# put to the "tee" command). Then remove the temporary file. # -# Restore the shell options saved at the beginning of this script/func- -# tion. 
+exptdir=$( sed "1q;d" "${tmp_fp}" ) +retval=$( sed "2q;d" "${tmp_fp}" ) +rm "${tmp_fp}" # -#----------------------------------------------------------------------- +# If the call to the generate_FV3SAR_wflow function above was success- +# ful, move the log file in which the "tee" command saved the output of +# the function to the experiment directory. # -{ restore_shell_opts; } > /dev/null 2>&1 +if [ $retval -eq 0 ]; then + mv "${log_fp}" "$exptdir" +# +# If the call to the generate_FV3SAR_wflow function above was not suc- +# cessful, print out an error message and exit with a nonzero return +# code. +# +else + printf " +Experiment/workflow generation failed. Check the log file from the ex- +periment/workflow generation script in the file specified by log_fp: + log_fp = \"${log_fp}\" +Stopping. +" + exit 1 +fi diff --git a/ush/get_extrn_mdl_file_dir_info.sh b/ush/get_extrn_mdl_file_dir_info.sh index a8cdb03048..5e111186c6 100755 --- a/ush/get_extrn_mdl_file_dir_info.sh +++ b/ush/get_extrn_mdl_file_dir_info.sh @@ -1,13 +1,12 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -19,7 +18,7 @@ # #----------------------------------------------------------------------- # -function get_extrn_mdl_file_dir_info () { +function get_extrn_mdl_file_dir_info() { # #----------------------------------------------------------------------- # @@ -32,8 +31,19 @@ function get_extrn_mdl_file_dir_info () { # #----------------------------------------------------------------------- # -# Get the name of the current function. 
This is useful as part of error -# and/or informational messages. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. # #----------------------------------------------------------------------- # @@ -60,22 +70,16 @@ function get_extrn_mdl_file_dir_info () { "varname_extrn_mdl_arcvrel_dir" \ ) process_args valid_args "$@" - -# If VERBOSE is set to TRUE, print out what each valid argument has been -# set to. - if [ "$VERBOSE" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ -The arguments to script/function \"${func_name}\" have been set as -follows: -" - for (( i=0; i<$num_valid_args; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" - done - fi - - +# +#----------------------------------------------------------------------- +# +# For debugging purposes, print out values of arguments passed to this +# script/function. Note that these will be printed out only if VERBOSE +# is set to TRUE. +# +#----------------------------------------------------------------------- +# + print_input_args valid_args # #----------------------------------------------------------------------- # @@ -86,11 +90,16 @@ follows: if [ 0 = 1 ]; then if [ "$#" -ne "13" ]; then - print_err_msg_exit "${func_name}" "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. 
+ + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + Usage: - ${FUNCNAME[0]} \ + ${func_name} \ extrn_mdl_name \ anl_or_fcst \ cdate_FV3SAR \ @@ -174,7 +183,6 @@ where the arguments are defined as follows: Name of the global variable that will contain the archive-relative di- rectory, i.e. the directory \"inside\" the archive file in which the ex- ternal model output files may be stored. - " fi @@ -242,14 +250,7 @@ fi #----------------------------------------------------------------------- # valid_vals_anl_or_fcst=( "ANL" "anl" "FCST" "fcst" ) - iselementof "$anl_or_fcst" valid_vals_anl_or_fcst || { \ - valid_vals_anl_or_fcst_str=$(printf "\"%s\" " "${valid_vals_anl_or_fcst[@]}"); - print_err_msg_exit "${func_name}" "\ -Value specified in anl_or_fcst is not supported: - anl_or_fcst = \"$anl_or_fcst\" -anl_or_fcst must be set to one of the following: - $valid_vals_anl_or_fcst_str -"; } + check_var_valid_value "anl_or_fcst" "valid_vals_anl_or_fcst" # # For convenience of checking input values, change contents of anl_or_- # fcst to uppercase. @@ -305,7 +306,7 @@ anl_or_fcst must be set to one of the following: # lbc_update_fhrs=( "" ) - if [ "$anl_or_fcst" = "FCST" ]; then + if [ "${anl_or_fcst}" = "FCST" ]; then lbc_update_fhrs=( "${LBC_UPDATE_FCST_HRS[@]}" ) # @@ -316,7 +317,7 @@ anl_or_fcst must be set to one of the following: # the start time of the external model run. 
# num_fhrs=${#lbc_update_fhrs[@]} - for (( i=0; i<=$(( $num_fhrs - 1 )); i++ )); do + for (( i=0; i<=$((num_fhrs-1)); i++ )); do lbc_update_fhrs[$i]=$(( ${lbc_update_fhrs[$i]} + time_offset_hrs )) done @@ -329,8 +330,8 @@ anl_or_fcst must be set to one of the following: # #----------------------------------------------------------------------- # - if [ "$extrn_mdl_name" = "RAPX" ] || \ - [ "$extrn_mdl_name" = "HRRRX" ]; then + if [ "${extrn_mdl_name}" = "RAPX" ] || \ + [ "${extrn_mdl_name}" = "HRRRX" ]; then # # Get the Julian day-of-year of the starting date and time of the exter- # nal model run. @@ -351,7 +352,13 @@ anl_or_fcst must be set to one of the following: # #----------------------------------------------------------------------- # - case "$anl_or_fcst" in + if [ "${anl_or_fcst}" = "ANL" ]; then + fv3gfs_file_fmt="${FV3GFS_FILE_FMT_ICS}" + elif [ "${anl_or_fcst}" = "FCST" ]; then + fv3gfs_file_fmt="${FV3GFS_FILE_FMT_LBCS}" + fi + + case "${anl_or_fcst}" in # #----------------------------------------------------------------------- # @@ -364,7 +371,7 @@ anl_or_fcst must be set to one of the following: fcst_hh="00" fcst_mn="00" - case "$extrn_mdl_name" in + case "${extrn_mdl_name}" in "GSMGFS") # fns=( "atm" "sfc" "nst" ) @@ -377,7 +384,7 @@ anl_or_fcst must be set to one of the following: "FV3GFS") - if [ "$FV3GFS_DATA_TYPE" = "nemsio" ]; then + if [ "${fv3gfs_file_fmt}" = "nemsio" ]; then # fns=( "atm" "sfc" "nst" ) fns=( "atm" "sfc" ) @@ -386,9 +393,13 @@ anl_or_fcst must be set to one of the following: suffix="anl.nemsio" fns=( "${fns[@]/%/$suffix}" ) - elif [ "$FV3GFS_DATA_TYPE" = "grib2" ]; then #Only 0.25 degree files for now + elif [ "${fv3gfs_file_fmt}" = "grib2" ]; then - fns=( "gfs.t${hh}z.pgrb2.0p25.anl" ) +# GSK 12/16/2019: +# Turns out that the .f000 file contains certain necessary fields that +# are not in the .anl file, so switch to the former. +# fns=( "gfs.t${hh}z.pgrb2.0p25.anl" ) # Get only 0.25 degree files for now. 
+ fns=( "gfs.t${hh}z.pgrb2.0p25.f000" ) # Get only 0.25 degree files for now. fi ;; @@ -402,13 +413,12 @@ anl_or_fcst must be set to one of the following: ;; *) - print_err_msg_exit "${func_name}" "\ + print_err_msg_exit "\ The external model file names have not yet been specified for this com- bination of external model (extrn_mdl_name) and analysis or forecast (anl_or_fcst): - extrn_mdl_name = \"$extrn_mdl_name\" - anl_or_fcst = \"$anl_or_fcst\" -" + extrn_mdl_name = \"${extrn_mdl_name}\" + anl_or_fcst = \"${anl_or_fcst}\"" ;; esac @@ -424,7 +434,7 @@ bination of external model (extrn_mdl_name) and analysis or forecast fcst_mn="00" - case "$extrn_mdl_name" in + case "${extrn_mdl_name}" in "GSMGFS") fcst_hhh=( $( printf "%03d " "${lbc_update_fhrs[@]}" ) ) @@ -435,13 +445,13 @@ bination of external model (extrn_mdl_name) and analysis or forecast ;; "FV3GFS") - if [ "$FV3GFS_DATA_TYPE" = "nemsio" ]; then + if [ "${fv3gfs_file_fmt}" = "nemsio" ]; then fcst_hhh=( $( printf "%03d " "${lbc_update_fhrs[@]}" ) ) prefix="gfs.t${hh}z.atmf" fns=( "${fcst_hhh[@]/#/$prefix}" ) suffix=".nemsio" fns=( "${fns[@]/%/$suffix}" ) - elif [ "$FV3GFS_DATA_TYPE" = "grib2" ]; then + elif [ "${fv3gfs_file_fmt}" = "grib2" ]; then fcst_hhh=( $( printf "%03d " "${lbc_update_fhrs[@]}" ) ) prefix="gfs.t${hh}z.pgrb2.0p25.f" fns=( "${fcst_hhh[@]/#/$prefix}" ) @@ -465,13 +475,12 @@ bination of external model (extrn_mdl_name) and analysis or forecast ;; *) - print_err_msg_exit "${func_name}" "\ + print_err_msg_exit "\ The external model file names have not yet been specified for this com- bination of external model (extrn_mdl_name) and analysis or forecast (anl_or_fcst): - extrn_mdl_name = \"$extrn_mdl_name\" - anl_or_fcst = \"$anl_or_fcst\" -" + extrn_mdl_name = \"${extrn_mdl_name}\" + anl_or_fcst = \"${anl_or_fcst}\"" ;; esac @@ -490,13 +499,13 @@ bination of external model (extrn_mdl_name) and analysis or forecast # #----------------------------------------------------------------------- # - if 
[ "$anl_or_fcst" = "ANL" ]; then - sysbasedir="$EXTRN_MDL_FILES_SYSBASEDIR_ICS" - elif [ "$anl_or_fcst" = "FCST" ]; then - sysbasedir="$EXTRN_MDL_FILES_SYSBASEDIR_LBCS" + if [ "${anl_or_fcst}" = "ANL" ]; then + sysbasedir="${EXTRN_MDL_FILES_SYSBASEDIR_ICS}" + elif [ "${anl_or_fcst}" = "FCST" ]; then + sysbasedir="${EXTRN_MDL_FILES_SYSBASEDIR_LBCS}" fi - case "$extrn_mdl_name" in + case "${extrn_mdl_name}" in # # It is not clear which, if any, systems the (old) spectral GFS model is @@ -520,12 +529,11 @@ bination of external model (extrn_mdl_name) and analysis or forecast sysdir="" ;; *) - print_err_msg_exit "${func_name}" "\ + print_err_msg_exit "\ The system directory in which to look for external model output files has not been specified for this external model and machine combination: - extrn_mdl_name = \"$extrn_mdl_name\" - MACHINE = \"$MACHINE\" -" + extrn_mdl_name = \"${extrn_mdl_name}\" + MACHINE = \"$MACHINE\"" ;; esac ;; @@ -549,12 +557,11 @@ has not been specified for this external model and machine combination: sysdir="$sysbasedir/${yyyymmdd}" ;; *) - print_err_msg_exit "${func_name}" "\ + print_err_msg_exit "\ The system directory in which to look for external model output files has not been specified for this external model and machine combination: - extrn_mdl_name = \"$extrn_mdl_name\" - MACHINE = \"$MACHINE\" -" + extrn_mdl_name = \"${extrn_mdl_name}\" + MACHINE = \"$MACHINE\"" ;; esac ;; @@ -578,12 +585,11 @@ has not been specified for this external model and machine combination: sysdir="$sysbasedir" ;; *) - print_err_msg_exit "${func_name}" "\ + print_err_msg_exit "\ The system directory in which to look for external model output files has not been specified for this external model and machine combination: - extrn_mdl_name = \"$extrn_mdl_name\" - MACHINE = \"$MACHINE\" -" + extrn_mdl_name = \"${extrn_mdl_name}\" + MACHINE = \"$MACHINE\"" ;; esac ;; @@ -607,23 +613,21 @@ has not been specified for this external model and machine combination: 
sysdir="$sysbasedir" ;; *) - print_err_msg_exit "${func_name}" "\ + print_err_msg_exit "\ The system directory in which to look for external model output files has not been specified for this external model and machine combination: - extrn_mdl_name = \"$extrn_mdl_name\" - MACHINE = \"$MACHINE\" -" + extrn_mdl_name = \"${extrn_mdl_name}\" + MACHINE = \"$MACHINE\"" ;; esac ;; *) - print_err_msg_exit "${func_name}" "\ + print_err_msg_exit "\ The system directory in which to look for external model output files has not been specified for this external model: - extrn_mdl_name = \"$extrn_mdl_name\" -" + extrn_mdl_name = \"${extrn_mdl_name}\"" esac # @@ -643,16 +647,16 @@ has not been specified for this external model: # #----------------------------------------------------------------------- # - case "$extrn_mdl_name" in + case "${extrn_mdl_name}" in "GSMGFS") arcv_dir="/NCEPPROD/hpssprod/runhistory/rh${yyyy}/${yyyy}${mm}/${yyyymmdd}" arcv_fmt="tar" arcv_fns="gpfs_hps_nco_ops_com_gfs_prod_gfs.${cdate}." - if [ "$anl_or_fcst" = "ANL" ]; then + if [ "${anl_or_fcst}" = "ANL" ]; then arcv_fns="${arcv_fns}anl" arcvrel_dir="." - elif [ "$anl_or_fcst" = "FCST" ]; then + elif [ "${anl_or_fcst}" = "FCST" ]; then arcv_fns="${arcv_fns}sigma" arcvrel_dir="/gpfs/hps/nco/ops/com/gfs/prod/gfs.${yyyymmdd}" fi @@ -661,7 +665,7 @@ has not been specified for this external model: ;; "FV3GFS") - if [ "$FV3GFS_DATA_TYPE" = "nemsio" ]; then + if [ "${fv3gfs_file_fmt}" = "nemsio" ]; then if [ "${cdate_FV3SAR}" -le "2019061206" ]; then arcv_dir="/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_C/Q2FY19/prfv3rt3/${cdate_FV3SAR}" @@ -671,10 +675,10 @@ has not been specified for this external model: arcv_fns="gpfs_dell1_nco_ops_com_gfs_prod_gfs.${yyyymmdd}_${hh}." 
fi arcv_fmt="tar" - if [ "$anl_or_fcst" = "ANL" ]; then + if [ "${anl_or_fcst}" = "ANL" ]; then arcv_fns="${arcv_fns}gfs_nemsioa" arcvrel_dir="./gfs.${yyyymmdd}/${hh}" - elif [ "$anl_or_fcst" = "FCST" ]; then + elif [ "${anl_or_fcst}" = "FCST" ]; then last_fhr_in_nemsioa="39" first_lbc_fhr="${lbc_update_fhrs[0]}" last_lbc_fhr="${lbc_update_fhrs[-1]}" @@ -688,7 +692,7 @@ has not been specified for this external model: arcvrel_dir="./gfs.${yyyymmdd}/${hh}" fi - elif [ "$FV3GFS_DATA_TYPE" = "grib2" ]; then + elif [ "${fv3gfs_file_fmt}" = "grib2" ]; then arcv_dir="/NCEPPROD/hpssprod/runhistory/rh${yyyy}/${yyyy}${mm}/${yyyymmdd}" arcv_fns="gpfs_dell1_nco_ops_com_gfs_prod_gfs.${yyyymmdd}_${hh}.gfs_pgrb2" @@ -753,10 +757,9 @@ has not been specified for this external model: ;; *) - print_err_msg_exit "${func_name}" "\ + print_err_msg_exit "\ Archive file information has not been specified for this external model: - extrn_mdl_name = \"$extrn_mdl_name\" -" + extrn_mdl_name = \"${extrn_mdl_name}\"" ;; esac diff --git a/ush/iselementof.sh b/ush/iselementof.sh deleted file mode 100644 index 8c93fbe598..0000000000 --- a/ush/iselementof.sh +++ /dev/null @@ -1,111 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines a function that is used to check whether a given ar- -# ray contains a specified string as one of its elements. It is called -# as follows: -# -# iselemof "$str_to_match" array_name -# -# where $str_to_match is the string to find in the array array_name. -# Use this function in a script as follows: -# -# . ./iselementof.sh -# array_name=("1" "2" "3 4" "5") -# -# str_to_match="2" -# iselementof "$str_to_match" array_name -# echo $? # Should output 0. -# -# str_to_match="3 4" -# iselementof "$str_to_match" array_name -# echo $? # Should output 0. -# -# str_to_match="6" -# iselementof "$str_to_match" array_name -# echo $? # Should output 1. 
-# -# Note that the first argument to this function is the array name (with- -# out a "$" before it or "[@]" after it). -# -#----------------------------------------------------------------------- -# -function iselementof () { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Check arguments. -# -#----------------------------------------------------------------------- -# - if [ "$#" -ne 2 ]; then - print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. -Usage: - - ${FUNCNAME[0]} str_to_match array_name - -where the arguments are defined as follows: - - str_to_match: - The string to find in array_name (as one of its elements). - - array_name: - The name of the array to search. - -" - - fi -# -#----------------------------------------------------------------------- -# -# Set local variables to appropriate input arguments. -# -#----------------------------------------------------------------------- -# - local match="$1" - local array="$2[@]" -# -#----------------------------------------------------------------------- -# -# Loop through the array elements and look for $match in the array. If -# it is found, set contains to 0. Otherwise, set it to 1. -# -#----------------------------------------------------------------------- -# - local contains=1 - local element - for element in "${!array}"; do - if [ "$element" = "$match" ]; then - contains=0 - break - fi - done -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. 
-# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Return the variable "contains". -# -#----------------------------------------------------------------------- -# - return $contains -} - diff --git a/ush/launch_FV3SAR_wflow.sh b/ush/launch_FV3SAR_wflow.sh new file mode 100755 index 0000000000..db583d62df --- /dev/null +++ b/ush/launch_FV3SAR_wflow.sh @@ -0,0 +1,407 @@ +#!/bin/bash -l + +# +#----------------------------------------------------------------------- +# +# Set shell options. +# +#----------------------------------------------------------------------- +# +set -u +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the experiment directory. We assume that there is a symlink to +# this script in the experiment directory, and this script is called via +# that symlink. Thus, finding the directory in which the symlink is lo- +# cated will give us the experiment directory. We find this by first +# obtaining the directory portion (i.e. the portion without the name of +# this script) of the command that was used to called this script (i.e. +# "$0") and then use the "readlink -f" command to obtain the correspond- +# ing absolute path. 
This will work for all four of the following ways +# in which the symlink in the experiment directory pointing to this +# script may be called: +# +# 1) Call this script from the experiment directory: +# > cd /path/to/experiment/directory +# > launch_FV3SAR_wflow.sh +# +# 2) Call this script from the experiment directory but using "./" be- +# fore the script name: +# > cd /path/to/experiment/directory +# > ./launch_FV3SAR_wflow.sh +# +# 3) Call this script from any directory using the absolute path to the +# symlink in the experiment directory: +# > /path/to/experiment/directory/launch_FV3SAR_wflow.sh +# +# 4) Call this script from a directory that is several levels up from +# the experiment directory (but not necessarily at the root directo- +# ry): +# > cd /path/to +# > experiment/directory/launch_FV3SAR_wflow.sh +# +# Note that given just a file name, e.g. the name of this script without +# any path before it, the "dirname" command will return a ".", e.g. in +# bash, +# +# > exptdir=$( dirname "launch_FV3SAR_wflow.sh" ) +# > echo $exptdir +# +# will print out ".". +# +#----------------------------------------------------------------------- +# +exptdir=$( dirname "$0" ) +exptdir=$( readlink -f "$exptdir" ) +# +#----------------------------------------------------------------------- +# +# Source the variable definitions file for the experiment. +# +#----------------------------------------------------------------------- +# +. $exptdir/var_defns.sh +# +#----------------------------------------------------------------------- +# +# Set the name of the experiment. We take this to be the name of the +# experiment subdirectory (i.e. the string after the last "/" in the +# full path to the experiment directory). +# +#----------------------------------------------------------------------- +# +expt_name="${EXPT_SUBDIR}" +# +#----------------------------------------------------------------------- +# +# Load necessary modules. 
+# +#----------------------------------------------------------------------- +# +module purge +module load rocoto +# +#----------------------------------------------------------------------- +# +# Set file names. These include the rocoto database file and the log +# file in which to store output from this script (aka the workflow +# launch script). +# +#----------------------------------------------------------------------- +# +rocoto_xml_bn=$( basename "${WFLOW_XML_FN}" ".xml" ) +rocoto_database_fn="${rocoto_xml_bn}.db" +launch_log_fn="log.launch_${rocoto_xml_bn}" +# +#----------------------------------------------------------------------- +# +# Initialize the default status of the workflow to "IN PROGRESS". +# +#----------------------------------------------------------------------- +# +wflow_status="IN PROGRESS" +# +#----------------------------------------------------------------------- +# +# Change location to the experiment directory. +# +#----------------------------------------------------------------------- +# +cd "$exptdir" +# +#----------------------------------------------------------------------- +# +# Issue the rocotorun command to (re)launch the next task in the +# workflow. Then check for error messages in the output of rocotorun. +# If any are found, it means the end-to-end run of the workflow failed. +# In this case, we remove the crontab entry that launches the workflow, +# and we append an appropriate failure message at the end of the launch +# log file. +# +#----------------------------------------------------------------------- +# + +#rocotorun_output=$( ls -alF ) +#echo +#echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" +#echo "${rocotorun_output}" +#echo "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB" + +#rocotorun_output=$( \ +#rocotorun -w "${WFLOW_XML_FN}" -d "${rocoto_database_fn}" -v 10 \ +#) +#rocotorun_output=$( (rocotorun -w "${WFLOW_XML_FN}" -d "${rocoto_database_fn}" -v 10) 2>&1 ) # This freezes the script. 
+#rocotorun_output=$( (rocotorun -w "${WFLOW_XML_FN}" -d "${rocoto_database_fn}" -v 10) 1>&2 ) # This leaves rocotorun_output empty. +#rocotorun_output=$( rocotorun -w "${WFLOW_XML_FN}" -d "${rocoto_database_fn}" -v 10 ) +#{ error=$(command 2>&1 1>&$out); } {out}>&1 +#{ rocotorun_output=$( rocotorun -w "${WFLOW_XML_FN}" -d "${rocoto_database_fn}" -v 10 2>&1 1>&$out); } {out}>&1 # This freezes the script. + +# +# Ideally, the following two lines should work, but for some reason the +# output of rocotorun cannot be captured in a variable using the $(...) +# notation. Maybe it's not being written to stdout, although I tried +# redirecting stderr to stdout and other tricks but nothing seemed to +# work. For this reason, below we first redirect the output of rocoto- +# run to a temporary file and then read in the contents of that file in- +# to the rocotorun_output variable using the cat command. +# +#rocotorun_cmd="rocotorun -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10" +#rocotorun_output=$( eval ${rocotorun_cmd} 2>&1 ) +# +tmp_fn="rocotorun_output.txt" +#rocotorun_cmd="rocotorun -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10 > ${tmp_fn}" +rocotorun_cmd="rocotorun -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10" +eval ${rocotorun_cmd} > ${tmp_fn} 2>&1 +rocotorun_output=$( cat "${tmp_fn}" ) +rm "${tmp_fn}" + +error_msg="sbatch: error: Batch job submission failed:" +# Job violates accounting/QOS policy (job submit limit, user's size and/or time limits)" +while read -r line; do + grep_output=$( printf "$line" | grep "${error_msg}" ) + if [ $? -eq 0 ]; then + wflow_status="FAILURE" + break + fi +done <<< "${rocotorun_output}" +# +#----------------------------------------------------------------------- +# +# Issue the rocotostat command to obtain a table specifying the status +# of each task. Then check for dead tasks in the output of rocotostat. +# If any are found, it means the end-to-end run of the workflow failed. 
+# In this case, we remove the crontab entry that launches the workflow, +# and we append an appropriate failure message at the end of the launch +# log file. +# +#----------------------------------------------------------------------- +# +#rocotostat_cmd="{ pwd; rocotostat -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10; }" +#rocotostat_cmd="{ pwd; ls -alF; rocotostat -w ${WFLOW_XML_FN} -d ${rocoto_database_fn} -v 10; }" +#rocotostat_cmd="{ pwd; ls -alF; rocotostat -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10; }" +#rocotostat_cmd="{ pwd; rocotostat -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10; }" +#rocotostat_cmd="{ rocotostat -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10; }" +rocotostat_cmd="rocotostat -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10" + +#rocotostat_output=$( pwd; rocotostat -w "${WFLOW_XML_FN}" -d "${rocoto_database_fn}" -v 10 2>&1 ) +#rocotostat_output=$( rocotostat -w "${WFLOW_XML_FN}" -d "${rocoto_database_fn}" -v 10 2>&1 ) +rocotostat_output=$( eval ${rocotostat_cmd} 2>&1 ) +#rocotostat_output=$( ${rocotostat_cmd} 2>&1 ) +#rocotostat_output=$( { pwd; ls -alF; } 2>&1 ) +error_msg="DEAD" +while read -r line; do + grep_output=$( printf "$line" | grep "${error_msg}" ) + if [ $? -eq 0 ]; then + wflow_status="FAILURE" + break + fi +done <<< "${rocotostat_output}" +# +#----------------------------------------------------------------------- +# +# Place the outputs of the rocotorun and rocotostat commands obtained +# above into the launch log file. +# +#----------------------------------------------------------------------- +# +printf " + +======================================================================== +Start of output from script \"${scrfunc_fn}\". 
+======================================================================== + +Running rocotorun command (rocotorun_cmd): +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + rocotorun_cmd = \'${rocotorun_cmd}\' + +Output of rocotorun_cmd is: +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +${rocotorun_output} + +Running rocotostat command (rocotostat_cmd): +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + rocotostat_cmd = \'${rocotostat_cmd}\' + +Output of rocotostat_cmd is: +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +${rocotostat_output} +" >> "${WFLOW_LAUNCH_LOG_FN}" 2>&1 +# +#----------------------------------------------------------------------- +# +# Use the rocotostat command with the "-s" flag to obtain a summary of +# the status of each cycle in the workflow. The output of this command +# has the following format: +# +# CYCLE STATE ACTIVATED DEACTIVATED +# 201905200000 Active Nov 07 2019 00:23:30 - +# ... +# +# Thus, the first row is a header line containing the column titles, and +# the remaining rows each correspond to one cycle in the workflow. Be- +# low, we are interested in the first and second columns of each row. +# The first column is a string containing the start time of the cycle +# (in the format YYYYMMDDHHmm, where YYYY is the 4-digit year, MM is the +# 2-digit month, DD is the 2-digit day of the month, HH is the 2-digit +# hour of the day, and mm is the 2-digit minute of the hour). The se- +# cond column is a string containing the state of the cycle. This can +# be "Active" or "Done". Below, we read in and store these two columns +# in (1-D) arrays. +# +#----------------------------------------------------------------------- +# +rocotostat_output=$( rocotostat -w "${WFLOW_XML_FN}" -d "${rocoto_database_fn}" -v 10 -s ) + +regex_search="^[ ]*([0-9]+)[ ]+([A-Za-z]+)[ ]+.*" +cycle_str=() +cycle_status=() +i=0 +while read -r line; do +# +# Note that the first line in rocotostat_output is a header line con- +# taining the column titles. 
Thus, we ignore it and consider only the
+# remaining lines (of which there is one per cycle).
+#
+  if [ $i -gt 0 ]; then
+    im1=$((i-1))
+    cycle_str[im1]=$( echo "$line" | sed -r -n -e "s/${regex_search}/\1/p" )
+    cycle_status[im1]=$( echo "$line" | sed -r -n -e "s/${regex_search}/\2/p" )
+  fi
+  i=$((i+1))
+done <<< "${rocotostat_output}"
+#
+#-----------------------------------------------------------------------
+#
+# Get the number of cycles. Then count the number of completed cycles
+# by finding the number of cycles for which the corresponding element in
+# the cycle_status array is set to "Done".
+#
+#-----------------------------------------------------------------------
+#
+num_cycles_total=${#cycle_str[@]}
+num_cycles_completed=0
+for (( i=0; i<=$((num_cycles_total-1)); i++ )); do
+  if [ "${cycle_status[$i]}" = "Done" ]; then
+    num_cycles_completed=$((num_cycles_completed+1))
+  fi
+done
+#
+#-----------------------------------------------------------------------
+#
+# If the number of completed cycles is equal to the total number of cy-
+# cles, it means the end-to-end run of the workflow was successful. In
+# this case, we reset the wflow_status to "SUCCESS".
+#
+#-----------------------------------------------------------------------
+#
+if [ ${num_cycles_completed} -eq ${num_cycles_total} ]; then
+  wflow_status="SUCCESS"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Print informational messages about the workflow to the launch log
+# file, including the workflow status.
+#
+#-----------------------------------------------------------------------
+#
+printf "
+
+Summary of workflow status:
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+  ${num_cycles_completed} out of ${num_cycles_total} cycles completed.
+  Workflow status: ${wflow_status}
+
+========================================================================
+End of output from script \"${scrfunc_fn}\".
+======================================================================== + +" >> ${WFLOW_LAUNCH_LOG_FN} 2>&1 +# +#----------------------------------------------------------------------- +# +# If the workflow status is now either "SUCCESS" or "FAILURE", indicate +# this by appending an appropriate workflow completion message to the +# end of the launch log file. +# +#----------------------------------------------------------------------- +# +if [ "${wflow_status}" = "SUCCESS" ] || \ + [ "${wflow_status}" = "FAILURE" ]; then + + msg=" +The end-to-end run of the workflow for the forecast experiment specified +by expt_name has completed with the following workflow status (wflow_- +status): + expt_name = \"${expt_name}\" + wflow_status = \"${wflow_status}\" +" +# +# If a cron job was being used to periodically relaunch the workflow, we +# now remove the entry in the crontab corresponding to the workflow be- +# cause the end-to-end run of the workflow has now either succeeded or +# failed and will remain in that state without manual user intervention. +# Thus, there is no need to try to relaunch it. We also append a mes- +# sage to the completion message above to indicate this. +# + if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then + + msg="${msg}\ +Thus, there is no need to relaunch the workflow via a cron job. Remo- +ving from the crontab the line (CRONTAB_LINE) that calls the workflow +launch script for this experiment: + CRONTAB_LINE = \"${CRONTAB_LINE}\" +" +# +# Below, we use "grep" to determine whether the crontab line that the +# variable CRONTAB_LINE contains is already present in the cron table. +# For that purpose, we need to escape the asterisks in the string in +# CRONTAB_LINE with backslashes. Do this next. 
+# + crontab_line_esc_astr=$( printf "%s" "${CRONTAB_LINE}" | \ + sed -r -e "s%[*]%\\\\*%g" ) +# +# In the string passed to the grep command below, we use the line start +# and line end anchors ("^" and "$", respectively) to ensure that we on- +# ly find lines in the crontab that contain exactly the string in cron- +# tab_line_esc_astr without any leading or trailing characters. +# + ( crontab -l | grep -v "^${crontab_line_esc_astr}$" ) | crontab - + + fi +# +# Print the workflow completion message to the launch log file. +# + printf "$msg" >> ${WFLOW_LAUNCH_LOG_FN} 2>&1 +# +# If the stdout from this script is being sent to the screen (e.g. it is +# not being redirected to a file), then also print out the workflow +# completion message to the screen. +# + if [ -t 1 ]; then + printf "$msg" + fi + +fi + + + + diff --git a/ush/link_fix.sh b/ush/link_fix.sh index 3af35cb411..0aff351b6b 100755 --- a/ush/link_fix.sh +++ b/ush/link_fix.sh @@ -3,35 +3,40 @@ # #----------------------------------------------------------------------- # -# Get the name of the current script and the directory in which it is -# located. This script should be located in USHDIR, so set USHDIR to -# the script directory (USHDIR is needed in various places below or in -# sourced scripts). +# This file defines a function that ... # #----------------------------------------------------------------------- # -script_name=$(basename ${BASH_SOURCE[0]}) -script_dir=$(dirname ${BASH_SOURCE[0]}) -USHDIR="${script_dir}" +function link_fix() { # #----------------------------------------------------------------------- # -# Source the function definitions file, which should be in the same di- -# rectory as the current script. This is needed in order to be able to -# use the process_args() function below. +# Save current shell options (in a global array). Then set new options +# for this script/function. # #----------------------------------------------------------------------- # -. 
$USHDIR/source_funcs.sh + { save_shell_opts; set -u -x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # -# Save current shell options (in a global array). Then set new options -# for this script/function. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u +x; } > /dev/null 2>&1 + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" # #----------------------------------------------------------------------- # @@ -41,39 +46,63 @@ USHDIR="${script_dir}" # #----------------------------------------------------------------------- # -valid_args=( "verbose" \ - "script_var_defns_fp" \ - "file_group" \ - ) -process_args valid_args "$@" + local valid_args=( \ +"verbose" \ +"file_group" \ +"output_varname_res_in_filenames" \ + ) + process_args valid_args "$@" # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. # #----------------------------------------------------------------------- # -. ${script_var_defns_fp} + print_input_args valid_args # #----------------------------------------------------------------------- # -# If verbose is set to TRUE, print out what each valid argument has been -# set to. 
+# Declare local variables. # #----------------------------------------------------------------------- # -if [ "$verbose" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ -The arguments to script/function \"${script_name}\" have been set as -follows: -" 1>&2 - for (( i=0; i<$num_valid_args; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" 1>&2 - done -fi + local valid_vals_verbose \ + valid_vals_file_group \ + fns \ + fps \ + run_task \ + sfc_climo_fields \ + num_fields \ + i \ + ii \ + res_prev \ + res \ + fp_prev \ + fp \ + fn \ + relative_or_null \ + cres \ + tmp \ + fns_sfc_climo_with_halo_in_fn \ + fns_sfc_climo_no_halo_in_fn \ + target \ + symlink +# +#----------------------------------------------------------------------- +# +# Set the valid values that various input arguments can take on and then +# ensure that the values passed in are one of these valid values. +# +#----------------------------------------------------------------------- +# + valid_vals_verbose=( "TRUE" "FALSE" ) + check_var_valid_value "verbose" "valid_vals_verbose" + + valid_vals_file_group=( "grid" "orog" "sfc_climo" ) + check_var_valid_value "file_group" "valid_vals_file_group" # #----------------------------------------------------------------------- # @@ -88,30 +117,39 @@ fi # #----------------------------------------------------------------------- # -if [ "$verbose" = "TRUE" ]; then - print_info_msg " -Creating links in the FIXsar directory to the grid files... -" -fi + print_info_msg "$verbose" " +Creating links in the FIXsar directory to the grid files..." # #----------------------------------------------------------------------- # -# Create globbing patterns for grid, orography, and surface climo files. +# Create globbing patterns for grid, orography, and surface climatology +# files. 
# #----------------------------------------------------------------------- # -fns_grid=( \ + case "${file_group}" in +# + "grid") + fns=( \ "C*_mosaic.nc" \ -"C*_grid.tile${TILE_RGNL}.halo${nh3_T7}.nc" \ -"C*_grid.tile${TILE_RGNL}.halo${nh4_T7}.nc" \ -) - -fns_orog=( \ -"C*_oro_data.tile${TILE_RGNL}.halo${nh0_T7}.nc" \ -"C*_oro_data.tile${TILE_RGNL}.halo${nh4_T7}.nc" \ -) - -sfc_climo_fields=( \ +"C*_grid.tile${TILE_RGNL}.halo${NH3}.nc" \ +"C*_grid.tile${TILE_RGNL}.halo${NH4}.nc" \ + ) + fps=( "${fns[@]/#/${GRID_DIR}/}" ) + run_task="${RUN_TASK_MAKE_GRID}" + ;; +# + "orog") + fns=( \ +"C*_oro_data.tile${TILE_RGNL}.halo${NH0}.nc" \ +"C*_oro_data.tile${TILE_RGNL}.halo${NH4}.nc" \ + ) + fps=( "${fns[@]/#/${OROG_DIR}/}" ) + run_task="${RUN_TASK_MAKE_OROG}" + ;; +# + "sfc_climo") + sfc_climo_fields=( \ "facsf" \ "maximum_snow_albedo" \ "slope_type" \ @@ -120,189 +158,197 @@ sfc_climo_fields=( \ "substrate_temperature" \ "vegetation_greenness" \ "vegetation_type" \ -) -num_fields=${#sfc_climo_fields[@]} -fns_sfc_climo=() -for (( i=0; i<${num_fields}; i++ )); do - ii=$((2*i)) - fns_sfc_climo[$ii]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${nh0_T7}.nc" - fns_sfc_climo[$ii+1]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${nh4_T7}.nc" -done -# -#----------------------------------------------------------------------- -# -# Prepend appropriate directory to each set of file name globbing pat- -# terns. 
-# -#----------------------------------------------------------------------- -# -fps_grid=( "${fns_grid[@]/#/${GRID_DIR}/}" ) -fps_orog=( "${fns_orog[@]/#/${OROG_DIR}/}" ) -fps_sfc_climo=( "${fns_sfc_climo[@]/#/${SFC_CLIMO_DIR}/}" ) - -if [ "${file_group}" = "grid" ]; then - fps_all=( "${fps_grid[@]}" ) - run_task="${RUN_TASK_MAKE_GRID}" -elif [ "${file_group}" = "orog" ]; then - fps_all=( "${fps_orog[@]}" ) - run_task="${RUN_TASK_MAKE_OROG}" -elif [ "${file_group}" = "sfc_climo" ]; then - fps_all=( "${fps_sfc_climo[@]}" ) - run_task="${RUN_TASK_MAKE_SFC_CLIMO}" -else - print_err_msg_exit "${script_name}" "\ -Invalid value specified for file_group. Valid values are: -" -fi + ) + num_fields=${#sfc_climo_fields[@]} + fns=() + for (( i=0; i<${num_fields}; i++ )); do + ii=$((2*i)) + fns[$ii]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${NH0}.nc" + fns[$ii+1]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${NH4}.nc" + done + fps=( "${fns[@]/#/${SFC_CLIMO_DIR}/}" ) + run_task="${RUN_TASK_MAKE_SFC_CLIMO}" + ;; +# + esac # #----------------------------------------------------------------------- # # Find all files matching the globbing patterns and make sure that they -# all have the same C-resolution in their names. +# all have the same resolution (an integer) in their names. # #----------------------------------------------------------------------- # -i=0 -res_prev="" -res="" -fp_prev="" - -for fp in ${fps_all[@]}; do + i=0 + res_prev="" + res="" + fp_prev="" - fn=$( basename $fp ) -printf "i = %s\n" "$i" -printf " fn = %s\n" "$fn" + for fp in ${fps[@]}; do - res=$( printf "%s" $fn | sed -n -r -e "s/^C([0-9]*).*/\1/p" ) - if [ -z $res ]; then - print_err_msg_exit "${script_name}" "\ -The C-resolution could not be extracted from the current file's name. 
-The full path to the file (fp) is: + fn=$( basename $fp ) + + res=$( printf "%s" $fn | sed -n -r -e "s/^C([0-9]*).*/\1/p" ) + if [ -z $res ]; then + print_err_msg_exit "\ +The resolution could not be extracted from the current file's name. The +full path to the file (fp) is: fp = \"${fp}\" This may be because fp contains the * globbing character, which would imply that no files were found that match the globbing pattern specified -in fp. -" - fi +in fp." + fi -printf " res_prev = %s\n" "${res_prev}" -printf " res = %s\n" "${res}" - if [ $i -gt 0 ] && [ ${res} != ${res_prev} ]; then - print_err_msg_exit "${script_name}" "\ -The C-resolutions (as obtained from the file names) of the previous and + if [ $i -gt 0 ] && [ ${res} != ${res_prev} ]; then + print_err_msg_exit "\ +The resolutions (as obtained from the file names) of the previous and current file (fp_prev and fp, respectively) are different: fp_prev = \"${fp_prev}\" fp = \"${fp}\" -Please ensure that all files have the same C-resolution. -" - fi +Please ensure that all files have the same resolution." + fi - i=$((i+1)) - fp_prev="$fp" - res_prev=${res} + i=$((i+1)) + fp_prev="$fp" + res_prev=${res} -done + done # #----------------------------------------------------------------------- # +# If the output variable name is not set to a null string, set it. This +# variable is just the resolution extracted from the file names in the +# specified file group. Note that if the output variable name is not +# specified in the call to this function, the process_args function will +# set it to a null string, in which case no output variable will be set. +# +#----------------------------------------------------------------------- # + if [ ! -z "${output_varname_res_in_filenames}" ]; then + eval ${output_varname_res_in_filenames}="$res" + fi # #----------------------------------------------------------------------- # -# Set RES to a null string if it is not already defined in the variable -# defintions file. 
+# Replace the * globbing character in the set of globbing patterns with +# the resolution. This will result in a set of (full paths to) specific +# files. # -RES=${RES:-""} -if [ "$RES" = "$res" ] || [ "$RES" = "" ]; then - cres="C${res}" - set_file_param "${SCRIPT_VAR_DEFNS_FP}" "RES" "${res}" - set_file_param "${SCRIPT_VAR_DEFNS_FP}" "CRES" "${cres}" -elif [ "$RES" != "$res" ]; then - print_err_msg_exit "${script_name}" "\ -The resolution (RES) specified in the variable definitions file -(script_var_defns_fp) does not match the resolution (res) found in this -script for the specified file group (file_group): - script_var_defns_fp = \"${script_var_defns_fp}\" - RES = \"${RES}\" - file_group = \"${file_group}\" - res = \"${res}\" -This usually means that one or more of the file groups (grid, orography, -and/or surface climatology) are defined on different grids. -" -fi +#----------------------------------------------------------------------- +# + fps=( "${fps[@]/\*/$res}" ) # #----------------------------------------------------------------------- # -# Replace the * globbing character in the set of globbing patterns with -# the C-resolution. This will result in a set of (full paths to) speci- -# fic files. Use these as the link targets to create symlinks in the -# FIXsar directory. +# In creating the various symlinks below, it is convenient to work in +# the FIXsar directory. We will change directory back to the original +# later below. 
# #----------------------------------------------------------------------- # -fps_all=( "${fps_all[@]/\*/$res}" ) - -echo -printf "fps_all = ( \\ \n" -printf "\"%s\" \\ \n" "${fps_all[@]}" -printf ")" -echo - -relative_or_null="" -if [ "${run_task}" = "TRUE" ]; then - relative_or_null="--relative" -fi - -echo -echo "FIXsar = \"$FIXsar\"" + cd_vrfy "$FIXsar" +# +#----------------------------------------------------------------------- +# +# Use the set of full file paths generated above as the link targets to +# create symlinks to these files in the FIXsar directory. +# +#----------------------------------------------------------------------- +# + relative_or_null="" + if [ "${run_task}" = "TRUE" ]; then + relative_or_null="--relative" + fi -cd_vrfy $FIXsar -for fp in "${fps_all[@]}"; do - if [ -f "$fp" ]; then - ln_vrfy -sf ${relative_or_null} $fp . -# ln_vrfy -sf $fp . - else - print_err_msg_exit "${script_name}" "\ + for fp in "${fps[@]}"; do + if [ -f "$fp" ]; then + ln_vrfy -sf ${relative_or_null} $fp . + else + print_err_msg_exit "\ Cannot create symlink because target file (fp) does not exist: fp = \"${fp}\"" - fi -done + fi + done +# +#----------------------------------------------------------------------- +# +# Set the C-resolution based on the resolution appearing in the file +# names. +# +#----------------------------------------------------------------------- +# + cres="C$res" # #----------------------------------------------------------------------- # -# Create links locally (in the FIXsar directory) needed by the forecast -# task. These are "files" that the FV3 executable looks for. +# If considering grid files, create a symlink to the halo4 grid file +# that does not contain the halo size in its name. This is needed by +# the tasks that generate the initial and lateral boundary condition +# files. 
# #----------------------------------------------------------------------- # -if [ "${file_group}" = "grid" ]; then -# Create link to grid file needed by the make_ic and make_lbc tasks. - filename="${cres}_grid.tile${TILE_RGNL}.halo${nh4_T7}.nc" - ln_vrfy -sf ${relative_or_null} $filename ${cres}_grid.tile${TILE_RGNL}.nc -fi + if [ "${file_group}" = "grid" ]; then + target="${cres}_grid.tile${TILE_RGNL}.halo${NH4}.nc" + symlink="${cres}_grid.tile${TILE_RGNL}.nc" + ln_vrfy -sf $target $symlink +# +# The surface climatology file generation code looks for a grid file ha- +# ving a name of the form "C${GFDLgrid_RES}_tile7.halo4.nc" (i.e. the +# resolution used in this file is that of the number of grid points per +# horizontal direction per tile, just like in the global model). Thus, +# if we are running this code, if the grid is of GFDLgrid type, and if +# we are not using GFDLgrid_RES in filenames (i.e. we are using the +# equivalent global uniform grid resolution instead), then create a +# link whose name uses the GFDLgrid_RES that points to the link whose +# name uses the equivalent global uniform resolution. +# + if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "TRUE" ] && \ + [ "${GRID_GEN_METHOD}" = "GFDLgrid" ] && \ + [ "${GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES}" = "FALSE" ]; then + target="${cres}_grid.tile${TILE_RGNL}.halo${NH4}.nc" + symlink="C${GFDLgrid_RES}_grid.tile${TILE_RGNL}.nc" + ln_vrfy -sf $target $symlink + fi -# Create links to surface climatology files needed by the make_ic task. -if [ "${file_group}" = "sfc_climo" ]; then + fi +# +#----------------------------------------------------------------------- +# +# If considering surface climatology files, create symlinks to the sur- +# face climatology files that do not contain the halo size in their +# names. These are needed by the task that generates the initial condi- +# tion files. 
+# +#----------------------------------------------------------------------- +# + if [ "${file_group}" = "sfc_climo" ]; then - tmp=( "${sfc_climo_fields[@]/#/${cres}.}" ) - fns_sfc_climo_with_halo=( "${tmp[@]/%/.tile${TILE_RGNL}.halo${nh4_T7}.nc}" ) - fns_sfc_climo_no_halo=( "${tmp[@]/%/.tile${TILE_RGNL}.nc}" ) + tmp=( "${sfc_climo_fields[@]/#/${cres}.}" ) + fns_sfc_climo_with_halo_in_fn=( "${tmp[@]/%/.tile${TILE_RGNL}.halo${NH4}.nc}" ) + fns_sfc_climo_no_halo_in_fn=( "${tmp[@]/%/.tile${TILE_RGNL}.nc}" ) - cd_vrfy $FIXsar - for (( i=0; i<${num_fields}; i++ )); do - target="${fns_sfc_climo_with_halo[$i]}" - symlink="${fns_sfc_climo_no_halo[$i]}" - if [ -f "$target" ]; then -# ln_vrfy -sf ${relative_or_null} $target $symlink - ln_vrfy -sf $target $symlink - else - print_err_msg_exit "${script_name}" "\ + for (( i=0; i<${num_fields}; i++ )); do + target="${fns_sfc_climo_with_halo_in_fn[$i]}" + symlink="${fns_sfc_climo_no_halo_in_fn[$i]}" + if [ -f "$target" ]; then + ln_vrfy -sf $target $symlink + else + print_err_msg_exit "\ Cannot create symlink because target file (target) does not exist: target = \"${target}\"" - fi - done + fi + done -fi + fi +# +#----------------------------------------------------------------------- +# +# Change directory back to original one. +# +#----------------------------------------------------------------------- +# + cd_vrfy - # #----------------------------------------------------------------------- # @@ -310,5 +356,6 @@ fi # #----------------------------------------------------------------------- # -{ restore_shell_opts; } > /dev/null 2>&1 + { restore_shell_opts; } > /dev/null 2>&1 +} diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh new file mode 100755 index 0000000000..fb149f1aa3 --- /dev/null +++ b/ush/load_modules_run_task.sh @@ -0,0 +1,327 @@ +#!/bin/bash + +# +#----------------------------------------------------------------------- +# +# Source the variable definitions file and the bash utility functions. 
+# +#----------------------------------------------------------------------- +# +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Check arguments. +# +#----------------------------------------------------------------------- +# +if [ "$#" -ne 2 ]; then + + print_err_msg_exit " +Incorrect number of arguments specified: + + Number of arguments specified: $# + +Usage: + + ${scrfunc_fn} task_name jjob_fp + +where the arguments are defined as follows: + + task_name: + The name of the rocoto task for which this script will load modules + and launch the J-job. + + jjob_fp + The full path to the J-job script corresponding to task_name. This + script will launch this J-job using the \"exec\" command (which will + first terminate this script and then launch the j-job; see man page of + the \"exec\" command). +" + +fi +# +#----------------------------------------------------------------------- +# +# Source the script that initializes the Lmod (Lua-based module) system/ +# software for handling modules. This script defines the module() and +# other functions. 
These are needed so we can perform the "module use
+# ..." and "module load ..." calls later below that are used to load the
+# appropriate module file for the specified task.
+#
+# Note that the build of the FV3 forecast model code generates the shell
+# script at
+#
+# ${UFS_WTHR_MDL_DIR}/NEMS/src/conf/module-setup.sh
+#
+# that can be used to initialize the Lmod (Lua-based module) system/
+# software for handling modules. This script:
+#
+# 1) Detects the shell in which it is being invoked (i.e. the shell of
+# the "parent" script in which it is being sourced).
+# 2) Detects the machine it is running on and calls the appropriate
+# (shell- and machine-dependent) initialization script to initialize
+# Lmod.
+# 3) Purges all modules.
+# 4) Uses the "module use ..." command to prepend or append paths to
+# Lmod's search path (MODULEPATH).
+#
+# We could use this module-setup.sh script to initialize Lmod, but since
+# it is only found in the forecast model's directory tree, here we pre-
+# fer to perform our own initialization. Ideally, there should be one
+# module-setup.sh script that is used by all external repos/codes, but
+# such a script does not exist. If/when it does, we will consider
+# switching to it instead of using the case-statement below.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "$VERBOSE" "
+Initializing the shell function \"module()\" (and others) in order to be
+able to use \"module load ...\" to load necessary modules ..."
+
+case "$MACHINE" in
+#
+ "WCOSS_C")
+ . /opt/modules/default/init/sh
+ ;;
+#
+ "DELL")
+ . /usrx/local/prod/lmod/lmod/init/sh
+ ;;
+#
+ "HERA")
+ . /apps/lmod/lmod/init/sh
+ ;;
+#
+ "JET")
+ . 
/apps/lmod/lmod/init/sh + ;; +# + *) + print_err_msg_exit "\ +The script to source to initialize lmod (module loads) has not yet been +specified for the current machine (MACHINE): + MACHINE = \"$MACHINE\"" + ;; +# +esac +# +#----------------------------------------------------------------------- +# +# Get the task name and the name of the J-job script. +# +#----------------------------------------------------------------------- +# +task_name="$1" +jjob_fp="$2" +# +#----------------------------------------------------------------------- +# +# Set the directory (modules_dir) in which the module files for the va- +# rious workflow tasks are located. Also, set the name of the module +# file for the specified task. +# +# A module file is a file whose first line is the "magic cookie" string +# '#%Module'. It is interpreted by the "module load ..." command. It +# sets environment variables (including prepending/appending to paths) +# and loads modules. +# +# The regional_workflow repository contains module files for all the +# workflow tasks in the template rocoto XML file for the FV3SAR work- +# flow. The full path to a module file for a given task is +# +# $HOMErrfs/modulefiles/$machine/${task_name} +# +# where HOMErrfs is the base directory of the workflow, machine is the +# name of the machine that we're running on (in lowercase), and task_- +# name is the name of the current task (an input to this script). For +# all tasks in the rocoto XML except run_fcst, these are actual files +# (as opposed to symlinks). For the run_fcst task, there are two possi- +# ble module files. The first one is named "run_fcst_no_ccpp" and is +# used to run FV3 without CCPP (i.e. it is used if USE_CCPP is set to +# "FALSE" in the experiment/workflow configuration file). This is also +# an actual file. The second one is named "run_fcst_ccpp" and is used +# to run FV3 with CCPP (i.e. it is used if USE_CCPP is set to "TRUE"). 
+# This second file is a symlink (and is a part of the regional_workflow
+# repo), and its target is
+#
+# ${UFS_WTHR_MDL_DIR}/NEMS/src/conf/modules.fv3
+#
+# Here, UFS_WTHR_MDL_DIR is the directory in which the ufs_weather_model
+# repository containing the FV3 model is cloned (normally "$HOMErrfs/
+# sorc/ufs_weather_model"), and modules.fv3 is a module file that is ge-
+# nerated by the forecast model's build process. It contains the appro-
+# priate modules to use when running the FV3 model. Thus, we just point
+# to it via the symlink "run_fcst_ccpp" in the modulefiles/$machine di-
+# rectory.
+#
+# QUESTION:
+# Why don't we do this for the non-CCPP version of FV3?
+#
+# ANSWER:
+# Because for that case, we load different versions of intel and impi
+# (compare modules.nems to the modules loaded for the case of USE_CCPP
+# set to "FALSE" in run_FV3SAR.sh). Maybe these can be combined at some
+# point. Note that a modules.nems file is generated in the same rela-
+# tive location in the non-CCPP-enabled version of the FV3 forecast mo-
+# del, so maybe that can be used and the run_FV3SAR.sh script modified
+# to accommodate such a change. That way the below can be performed for
+# both the CCPP-enabled and non-CCPP-enabled versions of the forecast
+# model.
+#
+#-----------------------------------------------------------------------
+#
+machine=${MACHINE,,}
+modules_dir="$HOMErrfs/modulefiles/tasks/$machine"
+modulefile_name="${task_name}"
+
+# Dom says that a correct modules.fv3 file is generated by the forecast
+# model build regardless of whether building with or without CCPP.
+# Thus, we can have a symlink named "run_fcst" that points to that file
+# regardless of the setting of USE_CCPP. But this requires that we then
+# test the non-CCPP-enabled version, which we've never done. Leave this
+# for another time...
+#if [ "${task_name}" = "run_fcst" ]; then
+# if [ "${USE_CCPP}" = "TRUE" ]; then
+# modulefile_name=${modulefile_name}_ccpp
+# else
+# modulefile_name=${modulefile_name}_no_ccpp
+# fi
+#fi
+#
+#-----------------------------------------------------------------------
+#
+# This comment needs to be updated:
+#
+# Use the "readlink" command to resolve the full path to the module file
+# and then verify that the file exists. This is not necessary for most
+# tasks, but for the run_fcst task, when CCPP is enabled, the module
+# file in the modules directory is not a regular file but a symlink to a
+# file in the ufs_weather_model external repo. This latter target file
+# will exist only if the forecast model code has already been built.
+# Thus, we now check to make sure that the module file exists.
+#
+#-----------------------------------------------------------------------
+#
+modulefile_path=$( readlink -f "${modules_dir}/${modulefile_name}" )
+
+if [ ! -f "${modulefile_path}" ]; then
+
+ if [ "${task_name}" = "${MAKE_OROG_TN}" ] || \
+ [ "${task_name}" = "${MAKE_SFC_CLIMO_TN}" ] || \
+ [ "${task_name}" = "${MAKE_ICS_TN}" ] || \
+ [ "${task_name}" = "${MAKE_LBCS_TN}" ] || \
+ [ "${task_name}" = "${RUN_FCST_TN}" ]; then
+
+ print_err_msg_exit "\
+The target (modulefile_path) of the symlink (modulefile_name) in the
+task modules directory (modules_dir) that points to module file for this
+task (task_name) does not exist:
+ task_name = \"${task_name}\"
+ modulefile_name = \"${modulefile_name}\"
+ modules_dir = \"${modules_dir}\"
+ modulefile_path = \"${modulefile_path}\"
+This is likely because the forecast model code has not yet been built."
+ + else + + print_err_msg_exit "\ +The module file (modulefile_path) specified for this task (task_name) +does not exist: + task_name = \"${task_name}\" + modulefile_path = \"${modulefile_path}\"" + + fi + +fi +# +#----------------------------------------------------------------------- +# +# Purge modules and load the module file for the specified task on the +# current machine. +# +#----------------------------------------------------------------------- +# +print_info_msg "$VERBOSE" " +Loading modules for task \"${task_name}\" ..." + +module purge + +module use "${modules_dir}" || print_err_msg_exit "\ +Call to \"module use\" command failed." + +# +# Some of the task module files that are symlinks to module files in the +# external repositories are in fact shell scripts (they shouldn't be; +# such cases should be fixed in the external repositories). For such +# files, we source the "module" file. For true module files, we use the +# "module load" command. +# +case "${task_name}" in +# +"${MAKE_ICS_TN}" | "${MAKE_LBCS_TN}" | "${MAKE_SFC_CLIMO_TN}") + . ${modulefile_path} || print_err_msg_exit "\ +Sourcing of \"module\" file (modulefile_path; really a shell script) for +the specified task (task_name) failed: + task_name = \"${task_name}\" + modulefile_path = \"${modulefile_path}\"" + ;; +# +*) + module load ${modulefile_name} || print_err_msg_exit "\ +Loading of module file (modulefile_name; in directory specified by mod- +ules_dir) for the specified task (task_name) failed: + task_name = \"${task_name}\" + modulefile_name = \"${modulefile_name}\" + modules_dir = \"${modules_dir}\"" + ;; +# +esac + +module list +# +#----------------------------------------------------------------------- +# +# Use the exec command to terminate the current script and launch the +# J-job for the specified task. +# +#----------------------------------------------------------------------- +# +print_info_msg "$VERBOSE" " +Launching J-job (jjob_fp) for task \"${task_name}\" ... 
+ jjob_fp = \"${jjob_fp}\" +" +exec "${jjob_fp}" +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# +{ restore_shell_opts; } > /dev/null 2>&1 + + diff --git a/ush/print_msg.sh b/ush/print_msg.sh deleted file mode 100644 index bd778f6f80..0000000000 --- a/ush/print_msg.sh +++ /dev/null @@ -1,290 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines functions used in printing formatted output to std- -# out (e.g. informational and error messages). -# -#----------------------------------------------------------------------- -# - -# -#----------------------------------------------------------------------- -# -# Function to print informational messages using printf. -# -#----------------------------------------------------------------------- -# -function print_info_msg() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Check arguments. -# -#----------------------------------------------------------------------- -# - if [ "$#" -ne 1 ]; then - print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. -Usage: - - ${FUNCNAME[0]} msg - -where msg is the message to print." - fi -# -#----------------------------------------------------------------------- -# -# Set local variables. 
-# -#----------------------------------------------------------------------- -# - local info_msg="$1" -# -#----------------------------------------------------------------------- -# -# Remove trailing newlines from info_msg. Command substitution [i.e. -# the $( ... )] will do this automatically. -# -#----------------------------------------------------------------------- -# - info_msg=$( printf '%s' "${info_msg}" ) -# -#----------------------------------------------------------------------- -# -# Add informational lines at the beginning and end of the message. -# -#----------------------------------------------------------------------- -# - local MSG=$(printf "\ -$info_msg -") -# -#----------------------------------------------------------------------- -# -# Print out the message. -# -#----------------------------------------------------------------------- -# - printf '%s\n' "$MSG" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 -} - - - -# -#----------------------------------------------------------------------- -# -# Function to print informational messages using printf, but only if the -# VERBOSE flag is set to "TRUE". -# -#----------------------------------------------------------------------- -# -function print_info_msg_verbose() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Check arguments. 
-# -#----------------------------------------------------------------------- -# - if [ "$#" -ne 1 ]; then - print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. -Usage: - - ${FUNCNAME[0]} msg - -where msg is the message to print." - fi -# -#----------------------------------------------------------------------- -# -# Print the message only if VERBOSE is set to "TRUE". -# -#----------------------------------------------------------------------- -# - if [ "$VERBOSE" = "TRUE" ]; then - print_info_msg "$1" - fi -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 -} - - - -# -#----------------------------------------------------------------------- -# -# Function to print error messages using printf and exit. -# -#----------------------------------------------------------------------- -# -function print_err_msg_exit() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# If no arguments are supplied, use a standard error message. -# -#----------------------------------------------------------------------- -# - if [ "$#" -eq 0 ]; then - - local MSG=$(printf "\ -ERROR. Exiting script or function with nonzero status. -") -# -#----------------------------------------------------------------------- -# -# If one argument is supplied, we assume it is the message to print out -# between informational lines that are always printed. 
-# -#----------------------------------------------------------------------- -# - elif [ "$#" -eq 1 ]; then - - local err_msg="$1" -# -#----------------------------------------------------------------------- -# -# Remove trailing newlines from err_msg. Command substitution [i.e. the -# $( ... )] will do this automatically. -# -#----------------------------------------------------------------------- -# - err_msg=$( printf '%s' "${err_msg}" ) -# -#----------------------------------------------------------------------- -# -# Add informational lines at the beginning and end of the message. -# -#----------------------------------------------------------------------- -# - local MSG=$(printf "\ -ERROR: -$err_msg -Exiting script/function with nonzero status. -") -# -#----------------------------------------------------------------------- -# -# If two arguments are supplied, we assume the first argument is the -# name of the script or function from which this function is being -# called while the second argument is the message to print out between -# informational lines that are always printed. -# -#----------------------------------------------------------------------- -# - elif [ "$#" -eq 2 ]; then - - local script_func_name="$1" - local err_msg="$2" -# -#----------------------------------------------------------------------- -# -# Remove trailing newlines from err_msg. Command substitution [i.e. the -# $( ... )] will do this automatically. -# -#----------------------------------------------------------------------- -# - err_msg=$( printf '%s' "${err_msg}" ) -# -#----------------------------------------------------------------------- -# -# Add informational lines at the beginning and end of the message. -# -#----------------------------------------------------------------------- -# - local MSG=$(printf "\ -ERROR from script/function \"${script_func_name}\": -$err_msg -Exiting script/function with nonzero status. 
-") -# -#----------------------------------------------------------------------- -# -# If more than two arguments are supplied, print out a usage error mes- -# sage. -# -#----------------------------------------------------------------------- -# - elif [ "$#" -gt 1 ]; then - - local MSG=$(printf "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. -Usage: - - ${FUNCNAME[0]} - -or - - ${FUNCNAME[0]} msg - -where msg is an optional error message to print. Exiting with nonzero status. -") - - fi -# -#----------------------------------------------------------------------- -# -# Print out MSG and exit function/script with nonzero status. -# -#----------------------------------------------------------------------- -# - printf '\n%s\n' "$MSG" 1>&2 - exit 1 -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. This statement will not be reached due to the preceeding exit -# statement, but we include it here for completeness (i.e. there should -# be a call to restore_shell_opts that matches a preceeding call to -# save_shell_opts). -# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 -} - diff --git a/ush/process_args.sh b/ush/process_args.sh deleted file mode 100755 index 8a7e8c6a1f..0000000000 --- a/ush/process_args.sh +++ /dev/null @@ -1,271 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This function processes a list of variable name and value pairs passed -# to it as a set of arguments (starting with the second argument). Each -# name-value pair must have the form -# -# VAR_NAME=VAR_VALUE -# -# where VAR_NAME is the name of a variable and VAR_VALUE is the value it -# should have. For each name-value pair, this function creates a varia- -# ble of the specified name and assigns to it its corresponding value. 
-# -# The first argument to this function (valid_var_names) is the name of -# an array defined in the calling script that contains a list of valid -# variable values. The variable name specified in each name-value pair -# must correspond to one of the elements of this array. If it isn't, -# this function prints out an error message and exits with a nonzero -# exit code. Any variable in the list of valid variable names that is -# not assigned a value in a name-value pair gets set to the null string. -# -# This function may be called from a script as follows: -# -# valid_args=( "arg1" "arg2" "arg3" "arg4" ) -# process_args valid_args \ -# arg1="hello" \ -# arg3="goodbye" -# -# After the call to process_args in this script, there will exist four -# new (or reset) variables: arg1, arg2, arg3, and arg4. arg1 will be -# set to the string "hello", arg3 will be set to the string "goodby", -# and arg2 and arg4 will be set to the null string, i.e. "". -# -# The purpose of this function is to allow a script to process a set of -# arguments passed to it as variable name-and-value pairs by another -# script (aka the calling script) such that: -# -# 1) The calling script can only pass one of a restricted set of varia- -# bles to the child script. This set is specified within the child -# script and is known as the -# -# 2) The calling script can specify a subset of the allowed variables in -# the child script. Variables that are not specified are set to the -# null string. -# -# 1) The "export" feature doesn't have to be used -#. 
For exam- -# ple, assume the script outer_script.sh calls a second script named in- -# ner_script.sh as follows: -# -# inner_script.sh \ -# arg1="hi there" \ -# arg2="all done" -# -# To process the arguments arg1 and arg2 passed to it, inner_script.sh -# may contain the following code: -# -# valid_args=( "arg1" "arg2" "arg3" "arg4" ) -# process_args valid_args "$@" -# -# The call to process_args here would cause arg1 and arg2 to be created -# and set to "hi_there" and "all done", respectively, and for arg3 and -# arg4 to be created and set to "". Note that arg1 through arg4 would -# not be defined in the environment of outer_script.sh; they would only -# be defined in the environment of inner_script.sh. -# -# Note that variables may also be set to arrays. For example, the call -# in outer_script.sh to inner_script.sh may be modified to -# -# inner_script.sh \ -# arg1="hi there" \ -# arg2="all done" -# arg4='( "dog" "cat" )' -# -# This would cause the scalar variables arg1 and arg2 to be created in -# the environment of inner_script.sh and set to "hi there" and "all -# done", respectively, for arg3 to be created and set to "", and for -# arg4 to be created (as an array) and set to the array ( "dog" "cat" ). -# - -# process_args valid_args "$@" -# The variable may be set to a scalar or -# array value. -# creating a variable of the same name as the one specified in each -# name-value pair and assigning to it the value specified in that pair. -# The variable in each name-value pair can be a scalar or an array. -# -#----------------------------------------------------------------------- -# -function process_args() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. 
-# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Check arguments. -# -#----------------------------------------------------------------------- -# - if [ "$#" -lt 1 ]; then - - print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. -Usage: - - ${FUNCNAME[0]} valid_var_names_array var_name_val_pair1 ... var_name_val_pairN - -where the arguments are defined as follows: - - valid_var_names_arrray: - The name of the array containing a list of valid variable names. - - var_name_val_pair1 ... var_name_val_pairN: - A list of N variable name-value pairs. These have the form - var_name1=\"var_val1\" ... var_nameN=\"var_valN\" - where each variable name (var_nameI) needs to be in the list of valid - variable names specified in valid_var_names_array. Note that not all - the valid variables listed in valid_var_names_array need to be set, - and the name-value pairs can be in any order (i.e. they don't have to - follow the order of variables listed in valid_var_names_array).\n" - - fi -# -#----------------------------------------------------------------------- -# -# -# -#----------------------------------------------------------------------- -# - local valid_var_names_at \ - valid_var_names \ - valid_var_names_str \ - num_valid_var_names \ - num_name_val_pairs \ - i valid_var_name name_val_pair var_name var_value is_array - - valid_var_names_at="$1[@]" - valid_var_names=("${!valid_var_names_at}") - valid_var_names_str=$(printf "\"%s\" " "${valid_var_names[@]}"); - num_valid_var_names=${#valid_var_names[@]} -# -#----------------------------------------------------------------------- -# -# Get the number of name-value pairs specified as inputs to this func- -# tion. These consist of the all arguments starting with the 2nd, so -# we subtract 1 from the total number of arguments. 
-# -#----------------------------------------------------------------------- -# - num_name_val_pairs=$(( $#-1 )) -# -#----------------------------------------------------------------------- -# -# Make sure that the number of name-value pairs is less than or equal to -# the number of valid variable names. -# -#----------------------------------------------------------------------- -# - if [ "${num_name_val_pairs}" -gt "${num_valid_var_names}" ]; then - print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": -The number of variable name-value pairs specified on the command line -must be less than or equal to the number of valid variable names speci- -fied in the array valid_var_names: - num_name_val_pairs = \"$num_name_val_pairs\" - num_valid_var_names = \"$num_valid_var_names\" -" - - fi -# -#----------------------------------------------------------------------- -# -# Initialize all valid variables to the null string. -# -#----------------------------------------------------------------------- -# - for (( i=0; i<$num_valid_var_names; i++ )); do - valid_var_name="${valid_var_names[$i]}" - eval ${valid_var_name}="" - valid_var_specified[$i]="false" - done -# -#----------------------------------------------------------------------- -# -# Loop over the list of variable name-value pairs and set variable val- -# ues. -# -#----------------------------------------------------------------------- -# - for name_val_pair in "${@:2}"; do - - var_name=$(echo ${name_val_pair} | cut -f1 -d=) - var_value=$(echo ${name_val_pair} | cut -f2 -d=) - - is_array="false" - if [ "${var_value:0:1}" = "(" ] && \ - [ "${var_value: -1}" = ")" ]; then - is_array="true" - fi -# -#----------------------------------------------------------------------- -# -# Make sure that the specified variable name is valid. 
-# -#----------------------------------------------------------------------- -# - iselementof "${var_name}" valid_var_names || { \ - print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": -The specified variable name in the current variable name-and-value pair -is not valid: - name_val_pair = \"${name_val_pair}\" - var_name = \"${var_name}\" -var_name must be set to one of the following: - $valid_var_names_str -"; } -# -#----------------------------------------------------------------------- -# -# Loop through the list of valid variable names and find the one that -# the current name-value pair corresponds to. Then set that variable to -# the specified value. -# -#----------------------------------------------------------------------- -# - for (( i=0; i<${num_valid_var_names}; i++ )); do - - valid_var_name="${valid_var_names[$i]}" - if [ "${var_name}" = "${valid_var_name}" ]; then - - if [ "${valid_var_specified[$i]}" = "false" ]; then - valid_var_specified[$i]="true" - if [ "${is_array}" = "true" ]; then - eval ${var_name}=${var_value} - else - eval ${var_name}=\"${var_value}\" - fi - else - cmd_line=$( printf "\'%s\' " "${@:1}" ) - print_err_msg_exit "\ -The current variable has already been assigned a value on the command -line: - var_name = \"${var_name}\" - cmd_line = ${cmd_line} -Please assign values to variables only once on the command line. -" - fi - fi - - done - - done -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. 
-# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} - diff --git a/ush/set_extrn_mdl_params.sh b/ush/set_extrn_mdl_params.sh index d5036abff2..4749e23b52 100644 --- a/ush/set_extrn_mdl_params.sh +++ b/ush/set_extrn_mdl_params.sh @@ -1,6 +1,36 @@ # #----------------------------------------------------------------------- # +# This file defines and then calls a function that sets parameters rela- +# ting to the external model used for initial conditions (ICs) and the +# one used for lateral boundary conditions (LBCs). +# +#----------------------------------------------------------------------- +# +function set_extrn_mdl_params() { +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +local scrfunc_fn=$( basename "${scrfunc_fp}" ) +local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# +local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# # Set the system directory (i.e. 
location on disk, not on HPSS) in which # the files generated by the external model specified by EXTRN_MDL_- # NAME_ICS that are necessary for generating initial condition (IC) @@ -46,8 +76,7 @@ The system directory in which to look for the files generated by the ex- ternal model specified by EXTRN_MDL_NAME_ICS has not been specified for this machine and external model combination: MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" -" + EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\"" ;; esac ;; @@ -62,7 +91,7 @@ this machine and external model combination: EXTRN_MDL_FILES_SYSBASEDIR_ICS="/scratch4/NCEPDEV/rstprod/com/gfs/prod" ;; "HERA") - EXTRN_MDL_FILES_SYSBASEDIR_ICS="/scratch2/NCEPDEV/fv3-cam/noscrub/Eric.Rogers/prfv3rt1" + EXTRN_MDL_FILES_SYSBASEDIR_ICS="/scratch1/NCEPDEV/hwrf/noscrub/hafs-input/COMGFS" ;; "JET") EXTRN_MDL_FILES_SYSBASEDIR_ICS="/lfs3/projects/hpc-wof1/ywang/regional_fv3/gfs" @@ -76,8 +105,7 @@ The system directory in which to look for the files generated by the ex- ternal model specified by EXTRN_MDL_NAME_ICS has not been specified for this machine and external model combination: MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" -" + EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\"" ;; esac ;; @@ -97,8 +125,7 @@ The system directory in which to look for the files generated by the ex- ternal model specified by EXTRN_MDL_NAME_ICS has not been specified for this machine and external model combination: MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" -" + EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\"" ;; esac ;; @@ -118,8 +145,7 @@ The system directory in which to look for the files generated by the ex- ternal model specified by EXTRN_MDL_NAME_ICS has not been specified for this machine and external model combination: MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" -" + EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\"" ;; esac ;; @@ -195,12 +221,11 @@ else ;; *) 
print_err_msg_exit "\ - The system directory in which to look for the files generated by the ex- - ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for - this machine and external model combination: - MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" -" +The system directory in which to look for the files generated by the ex- +ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for +this machine and external model combination: + MACHINE = \"$MACHINE\" + EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" ;; esac ;; @@ -216,7 +241,7 @@ else EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/scratch4/NCEPDEV/rstprod/com/gfs/prod" ;; "HERA") - EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/scratch2/NCEPDEV/fv3-cam/noscrub/Eric.Rogers/prfv3rt1" + EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/scratch1/NCEPDEV/hwrf/noscrub/hafs-input/COMGFS" ;; "JET") EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/lfs3/projects/hpc-wof1/ywang/regional_fv3/gfs" @@ -226,12 +251,11 @@ else ;; *) print_err_msg_exit "\ - The system directory in which to look for the files generated by the ex- - ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for - this machine and external model combination: - MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" -" +The system directory in which to look for the files generated by the ex- +ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for +this machine and external model combination: + MACHINE = \"$MACHINE\" + EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" ;; esac ;; @@ -248,12 +272,11 @@ else ;; *) print_err_msg_exit "\ - The system directory in which to look for the files generated by the ex- - ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for - this machine and external model combination: - MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" -" +The system directory in which to look for the files generated by the ex- +ternal model specified 
by EXTRN_MDL_NAME_LBCS has not been specified for +this machine and external model combination: + MACHINE = \"$MACHINE\" + EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" ;; esac ;; @@ -273,12 +296,11 @@ else *) print_err_msg_exit "\ - The system directory in which to look for the files generated by the ex- - ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for - this machine and external model combination: - MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" -" +The system directory in which to look for the files generated by the ex- +ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for +this machine and external model combination: + MACHINE = \"$MACHINE\" + EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" ;; esac ;; @@ -288,3 +310,13 @@ else esac fi + +} +# +#----------------------------------------------------------------------- +# +# Call the function defined above. +# +#----------------------------------------------------------------------- +# +set_extrn_mdl_params diff --git a/ush/set_gridparams_GFDLgrid.sh b/ush/set_gridparams_GFDLgrid.sh index 0ca3fa6651..1b49d3885a 100644 --- a/ush/set_gridparams_GFDLgrid.sh +++ b/ush/set_gridparams_GFDLgrid.sh @@ -1,7 +1,186 @@ -# This file is always sourced by another script (i.e. it's never run in -# its own shell), so there's no need to put the #!/bin/some_shell on the -# first line. +# +#----------------------------------------------------------------------- +# +# This file defines and then calls a function that sets the parameters +# for a grid that is to be generated using the "GFDLgrid" grid genera- +# tion method (i.e. GRID_GEN_METHOD set to "GFDLgrid"). +# +#----------------------------------------------------------------------- +# +function set_gridparams_GFDLgrid() { +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. 
+# +#----------------------------------------------------------------------- +# + { save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Specify the set of valid argument names for this script/function. +# Then process the arguments provided to this script/function (which +# should consist of a set of name-value pairs of the form arg1="value1", +# etc). 
+# +#----------------------------------------------------------------------- +# + local valid_args=( \ +"lon_of_t6_ctr" \ +"lat_of_t6_ctr" \ +"res_of_t6g" \ +"stretch_factor" \ +"refine_ratio_t6g_to_t7g" \ +"istart_of_t7_on_t6g" \ +"iend_of_t7_on_t6g" \ +"jstart_of_t7_on_t6g" \ +"jend_of_t7_on_t6g" \ +"output_varname_lon_of_t7_ctr" \ +"output_varname_lat_of_t7_ctr" \ +"output_varname_nx_of_t7_on_t7g" \ +"output_varname_ny_of_t7_on_t7g" \ +"output_varname_halo_width_on_t7g" \ +"output_varname_stretch_factor" \ +"output_varname_istart_of_t7_with_halo_on_t6sg" \ +"output_varname_iend_of_t7_with_halo_on_t6sg" \ +"output_varname_jstart_of_t7_with_halo_on_t6sg" \ +"output_varname_jend_of_t7_with_halo_on_t6sg" \ + ) + process_args valid_args "$@" +# +#----------------------------------------------------------------------- +# +# Declare local variables. +# +#----------------------------------------------------------------------- +# + local nx_of_t6_on_t6g \ + ny_of_t6_on_t6g \ + num_left_margin_cells_on_t6g \ + num_right_margin_cells_on_t6g \ + num_bot_margin_cells_on_t6g \ + num_top_margin_cells_on_t6g \ + lon_of_t7_ctr \ + lat_of_t7_ctr \ + istart_of_t7_on_t6sg \ + iend_of_t7_on_t6sg \ + jstart_of_t7_on_t6sg \ + jend_of_t7_on_t6sg \ + halo_width_on_t7g \ + halo_width_on_t6sg \ + istart_of_t7_with_halo_on_t6sg \ + iend_of_t7_with_halo_on_t6sg \ + jstart_of_t7_with_halo_on_t6sg \ + jend_of_t7_with_halo_on_t6sg \ + halo_width_on_t6sg \ + halo_width_on_t6g \ + halo_width_on_t7g \ + nx_of_t7_on_t6sg \ + nx_of_t7_on_t6g \ + nx_of_t7_on_t7g \ + ny_of_t7_on_t6sg \ + ny_of_t7_on_t6g \ + ny_of_t7_on_t7g \ + nx_of_t6_on_t6sg \ + ny_of_t6_on_t6sg \ + prime_factors_nx_of_t7_on_t7g \ + prime_factors_ny_of_t7_on_t7g \ + nx_of_t7_with_halo_on_t6sg \ + nx_of_t7_with_halo_on_t6g \ + nx_of_t7_with_halo_on_t7g \ + ny_of_t7_with_halo_on_t6sg \ + ny_of_t7_with_halo_on_t6g \ + ny_of_t7_with_halo_on_t7g +# +#----------------------------------------------------------------------- +# +# To 
simplify the grid setup, we require that tile 7 be centered on tile +# 6. Note that this is not really a restriction because tile 6 can al- +# ways be moved so that it is centered on tile 7 [the location of tile 6 +# doesn't really matter because for a regional setup, the forecast model +# will only run on tile 7 (not on tiles 1-6)]. +# +# We now check that tile 7 is centered on tile 6 by checking (1) that +# the number of cells (on tile 6) between the left boundaries of these +# two tiles is equal to that between their right boundaries and (2) that +# the number of cells (on tile 6) between the bottom boundaries of these +# two tiles is equal to that between their top boundaries. If not, we +# print out an error message and exit. If so, we set the longitude and +# latitude of the center of tile 7 to those of tile 6 and continue. +# +#----------------------------------------------------------------------- +# + nx_of_t6_on_t6g=${res_of_t6g} + ny_of_t6_on_t6g=${res_of_t6g} + + num_left_margin_cells_on_t6g=$(( istart_of_t7_on_t6g - 1 )) + num_right_margin_cells_on_t6g=$(( nx_of_t6_on_t6g - iend_of_t7_on_t6g )) + + if [ ${num_left_margin_cells_on_t6g} -ne ${num_right_margin_cells_on_t6g} ]; then + print_err_msg_exit "\ +In order for tile 7 to be centered in the x direction on tile 6, the x- +direction tile 6 cell indices at which tile 7 starts and ends (given by +istart_of_t7_on_t6g and iend_of_t7_on_t6g, respectively) must be set +such that the number of tile 6 cells in the margin between the left +boundaries of tiles 6 and 7 (given by num_left_margin_cells_on_t6g) is +equal to that in the margin between their right boundaries (given by +num_right_margin_cells_on_t6g): + istart_of_t7_on_t6g = ${istart_of_t7_on_t6g} + iend_of_t7_on_t6g = ${iend_of_t7_on_t6g} + num_left_margin_cells_on_t6g = ${num_left_margin_cells_on_t6g} + num_right_margin_cells_on_t6g = ${num_right_margin_cells_on_t6g} +Note that the total number of cells in the x-direction on tile 6 is gi- +ven by: 
+ nx_of_t6_on_t6g = ${nx_of_t6_on_t6g} +Please reset istart_of_t7_on_t6g and iend_of_t7_on_t6g and rerun." + fi + + num_bot_margin_cells_on_t6g=$(( jstart_of_t7_on_t6g - 1 )) + num_top_margin_cells_on_t6g=$(( ny_of_t6_on_t6g - jend_of_t7_on_t6g )) + if [ ${num_bot_margin_cells_on_t6g} -ne ${num_top_margin_cells_on_t6g} ]; then + print_err_msg_exit "\ +In order for tile 7 to be centered in the y direction on tile 6, the y- +direction tile 6 cell indices at which tile 7 starts and ends (given by +jstart_of_t7_on_t6g and jend_of_t7_on_t6g, respectively) must be set +such that the number of tile 6 cells in the margin between the left +boundaries of tiles 6 and 7 (given by num_left_margin_cells_on_t6g) is +equal to that in the margin between their right boundaries (given by +num_right_margin_cells_on_t6g): + jstart_of_t7_on_t6g = ${jstart_of_t7_on_t6g} + jend_of_t7_on_t6g = ${jend_of_t7_on_t6g} + num_bot_margin_cells_on_t6g = ${num_bot_margin_cells_on_t6g} + num_top_margin_cells_on_t6g = ${num_top_margin_cells_on_t6g} +Note that the total number of cells in the y-direction on tile 6 is gi- +ven by: + ny_of_t6_on_t6g = ${ny_of_t6_on_t6g} +Please reset jstart_of_t7_on_t6g and jend_of_t7_on_t6g and rerun." + fi + + lon_of_t7_ctr="${lon_of_t6_ctr}" + lat_of_t7_ctr="${lat_of_t6_ctr}" # #----------------------------------------------------------------------- # @@ -14,28 +193,28 @@ # the number of cells in each direction on that tile's grid. We will # denote these index limits by # -# istart_rgnl_T6SG -# iend_rgnl_T6SG -# jstart_rgnl_T6SG -# jend_rgnl_T6SG +# istart_of_t7_on_t6sg +# iend_of_t7_on_t6sg +# jstart_of_t7_on_t6sg +# jend_of_t7_on_t6sg # # The "_T6SG" suffix in these names is used to indicate that the indices # are on the supergrid of tile 6. Recall, however, that we have as in- # puts the index limits of the regional grid on the tile 6 grid, not its # supergrid. 
These are given by # -# istart_rgnl_T6 -# iend_rgnl_T6 -# jstart_rgnl_T6 -# jend_rgnl_T6 +# istart_of_t7_on_t6g +# iend_of_t7_on_t6g +# jstart_of_t7_on_t6g +# jend_of_t7_on_t6g # # We can obtain the former from the latter by recalling that the super- # grid has twice the resolution of the original grid. Thus, # -# istart_rgnl_T6SG = 2*istart_rgnl_T6 - 1 -# iend_rgnl_T6SG = 2*iend_rgnl_T6 -# jstart_rgnl_T6SG = 2*jstart_rgnl_T6 - 1 -# jend_rgnl_T6SG = 2*jend_rgnl_T6 +# istart_of_t7_on_t6sg = 2*istart_of_t7_on_t6g - 1 +# iend_of_t7_on_t6sg = 2*iend_of_t7_on_t6g +# jstart_of_t7_on_t6sg = 2*jstart_of_t7_on_t6g - 1 +# jend_of_t7_on_t6sg = 2*jend_of_t7_on_t6g # # These are obtained assuming that grid cells on tile 6 must either be # completely within the regional domain or completely outside of it, @@ -47,10 +226,10 @@ # #----------------------------------------------------------------------- # -istart_rgnl_T6SG=$(( 2*$istart_rgnl_T6 - 1 )) -iend_rgnl_T6SG=$(( 2*$iend_rgnl_T6 )) -jstart_rgnl_T6SG=$(( 2*$jstart_rgnl_T6 - 1 )) -jend_rgnl_T6SG=$(( 2*$jend_rgnl_T6 )) + istart_of_t7_on_t6sg=$(( 2*istart_of_t7_on_t6g - 1 )) + iend_of_t7_on_t6sg=$(( 2*iend_of_t7_on_t6g )) + jstart_of_t7_on_t6sg=$(( 2*jstart_of_t7_on_t6g - 1 )) + jend_of_t7_on_t6sg=$(( 2*jend_of_t7_on_t6g )) # #----------------------------------------------------------------------- # @@ -60,10 +239,10 @@ jend_rgnl_T6SG=$(( 2*$jend_rgnl_T6 )) # we must pass to make_hgrid the index limits (on the tile 6 supergrid) # of the regional grid including a halo. We will let the variables # -# istart_rgnl_wide_halo_T6SG -# iend_rgnl_wide_halo_T6SG -# jstart_rgnl_wide_halo_T6SG -# jend_rgnl_wide_halo_T6SG +# istart_of_t7_with_halo_on_t6sg +# iend_of_t7_with_halo_on_t6sg +# jstart_of_t7_with_halo_on_t6sg +# jend_of_t7_with_halo_on_t6sg # # denote these limits. 
The reason we include "_wide_halo" in these va- # riable names is that the halo of the grid that we will first generate @@ -73,44 +252,44 @@ jend_rgnl_T6SG=$(( 2*$jend_rgnl_T6 )) # the model needs later on by "shaving" layers of cells from this wide- # halo grid. Next, we describe how to calculate the above indices. # -# Let nhw_T7 denote the width of the "wide" halo in units of number of +# Let halo_width_on_t7g denote the width of the "wide" halo in units of number of # grid cells on the regional grid (i.e. tile 7) that we'd like to have # along all four edges of the regional domain (left, right, bottom, and # top). To obtain the corresponding halo width in units of number of -# cells on the tile 6 grid -- which we denote by nhw_T6 -- we simply di- -# vide nhw_T7 by the refinement ratio, i.e. +# cells on the tile 6 grid -- which we denote by halo_width_on_t6g -- we simply di- +# vide halo_width_on_t7g by the refinement ratio, i.e. # -# nhw_T6 = nhw_T7/refine_ratio +# halo_width_on_t6g = halo_width_on_t7g/refine_ratio_t6g_to_t7g # # The corresponding halo width on the tile 6 supergrid is then given by # -# nhw_T6SG = 2*nhw_T6 -# = 2*nhw_T7/refine_ratio +# halo_width_on_t6sg = 2*halo_width_on_t6g +# = 2*halo_width_on_t7g/refine_ratio_t6g_to_t7g # -# Note that nhw_T6SG must be an integer, but the expression for it de- +# Note that halo_width_on_t6sg must be an integer, but the expression for it de- # rived above may not yield an integer. To ensure that the halo has a -# width of at least nhw_T7 cells on the regional grid, we round up the -# result of the expression above for nhw_T6SG, i.e. we redefine nhw_T6SG +# width of at least halo_width_on_t7g cells on the regional grid, we round up the +# result of the expression above for halo_width_on_t6sg, i.e. we redefine halo_width_on_t6sg # to be # -# nhw_T6SG = ceil(2*nhw_T7/refine_ratio) +# halo_width_on_t6sg = ceil(2*halo_width_on_t7g/refine_ratio_t6g_to_t7g) # # where ceil(...) is the ceiling function, i.e. 
it rounds its floating # point argument up to the next larger integer. Since in bash division # of two integers returns a truncated integer and since bash has no # built-in ceil(...) function, we perform the rounding-up operation by # adding the denominator (of the argument of ceil(...) above) minus 1 to -# the original numerator, i.e. by redefining nhw_T6SG to be +# the original numerator, i.e. by redefining halo_width_on_t6sg to be # -# nhw_T6SG = (2*nhw_T7 + refine_ratio - 1)/refine_ratio +# halo_width_on_t6sg = (2*halo_width_on_t7g + refine_ratio_t6g_to_t7g - 1)/refine_ratio_t6g_to_t7g # # This trick works when dividing one positive integer by another. # -# In order to calculate nhw_T6G using the above expression, we must -# first specify nhw_T7. Next, we specify an initial value for it by +# In order to calculate halo_width_on_t6g using the above expression, we must +# first specify halo_width_on_t7g. Next, we specify an initial value for it by # setting it to one more than the largest-width halo that the model ac- -# tually needs, which is nh4_T7. We then calculate nhw_T6SG using the -# above expression. Note that these values of nhw_T7 and nhw_T6SG will +# tually needs, which is NH4. We then calculate halo_width_on_t6sg using the +# above expression. 
Note that these values of halo_width_on_t7g and halo_width_on_t6sg will # likely not be their final values; their final values will be calcula- # ted later below after calculating the starting and ending indices of # the regional grid with wide halo on the tile 6 supergrid and then ad- @@ -118,30 +297,30 @@ jend_rgnl_T6SG=$(( 2*$jend_rgnl_T6 )) # #----------------------------------------------------------------------- # -nhw_T7=$(( $nh4_T7 + 1 )) -nhw_T6SG=$(( (2*nhw_T7 + refine_ratio - 1)/refine_ratio )) + halo_width_on_t7g=$(( NH4 + 1 )) + halo_width_on_t6sg=$(( (2*halo_width_on_t7g + refine_ratio_t6g_to_t7g - 1)/refine_ratio_t6g_to_t7g )) # #----------------------------------------------------------------------- # -# With an initial value of nhw_T6SG now available, we can obtain the +# With an initial value of halo_width_on_t6sg now available, we can obtain the # tile 6 supergrid index limits of the regional domain (including the # wide halo) from the index limits for the regional domain without a ha- -# lo by simply subtracting nhw_T6SG from the lower index limits and add- -# ing nhw_T6SG to the upper index limits, i.e. +# lo by simply subtracting halo_width_on_t6sg from the lower index limits and add- +# ing halo_width_on_t6sg to the upper index limits, i.e. # -# istart_rgnl_wide_halo_T6SG = istart_rgnl_T6SG - nhw_T6SG -# iend_rgnl_wide_halo_T6SG = iend_rgnl_T6SG + nhw_T6SG -# jstart_rgnl_wide_halo_T6SG = jstart_rgnl_T6SG - nhw_T6SG -# jend_rgnl_wide_halo_T6SG = jend_rgnl_T6SG + nhw_T6SG +# istart_of_t7_with_halo_on_t6sg = istart_of_t7_on_t6sg - halo_width_on_t6sg +# iend_of_t7_with_halo_on_t6sg = iend_of_t7_on_t6sg + halo_width_on_t6sg +# jstart_of_t7_with_halo_on_t6sg = jstart_of_t7_on_t6sg - halo_width_on_t6sg +# jend_of_t7_with_halo_on_t6sg = jend_of_t7_on_t6sg + halo_width_on_t6sg # # We calculate these next. 
# #----------------------------------------------------------------------- # -istart_rgnl_wide_halo_T6SG=$(( $istart_rgnl_T6SG - $nhw_T6SG )) -iend_rgnl_wide_halo_T6SG=$(( $iend_rgnl_T6SG + $nhw_T6SG )) -jstart_rgnl_wide_halo_T6SG=$(( $jstart_rgnl_T6SG - $nhw_T6SG )) -jend_rgnl_wide_halo_T6SG=$(( $jend_rgnl_T6SG + $nhw_T6SG )) + istart_of_t7_with_halo_on_t6sg=$(( istart_of_t7_on_t6sg - halo_width_on_t6sg )) + iend_of_t7_with_halo_on_t6sg=$(( iend_of_t7_on_t6sg + halo_width_on_t6sg )) + jstart_of_t7_with_halo_on_t6sg=$(( jstart_of_t7_on_t6sg - halo_width_on_t6sg )) + jend_of_t7_with_halo_on_t6sg=$(( jend_of_t7_on_t6sg + halo_width_on_t6sg )) # #----------------------------------------------------------------------- # @@ -152,26 +331,28 @@ jend_rgnl_wide_halo_T6SG=$(( $jend_rgnl_T6SG + $nhw_T6SG )) # starting indices on the tile 6 supergrid of the grid with wide halo # must be odd while the ending indices must be even. Thus, below, we # subtract 1 from the starting indices if they are even (which ensures -# that there will be at least nhw_T7 halo cells along the left and bot- +# that there will be at least halo_width_on_t7g halo cells along the left and bot- # tom boundaries), and we add 1 to the ending indices if they are odd -# (which ensures that there will be at least nhw_T7 halo cells along the +# (which ensures that there will be at least halo_width_on_t7g halo cells along the # right and top boundaries). 
# #----------------------------------------------------------------------- # -if [ $(( istart_rgnl_wide_halo_T6SG%2 )) -eq 0 ]; then - istart_rgnl_wide_halo_T6SG=$(( istart_rgnl_wide_halo_T6SG - 1 )) -fi -if [ $(( iend_rgnl_wide_halo_T6SG%2 )) -eq 1 ]; then - iend_rgnl_wide_halo_T6SG=$(( iend_rgnl_wide_halo_T6SG + 1 )) -fi + if [ $(( istart_of_t7_with_halo_on_t6sg%2 )) -eq 0 ]; then + istart_of_t7_with_halo_on_t6sg=$(( istart_of_t7_with_halo_on_t6sg - 1 )) + fi + + if [ $(( iend_of_t7_with_halo_on_t6sg%2 )) -eq 1 ]; then + iend_of_t7_with_halo_on_t6sg=$(( iend_of_t7_with_halo_on_t6sg + 1 )) + fi + + if [ $(( jstart_of_t7_with_halo_on_t6sg%2 )) -eq 0 ]; then + jstart_of_t7_with_halo_on_t6sg=$(( jstart_of_t7_with_halo_on_t6sg - 1 )) + fi -if [ $(( jstart_rgnl_wide_halo_T6SG%2 )) -eq 0 ]; then - jstart_rgnl_wide_halo_T6SG=$(( jstart_rgnl_wide_halo_T6SG - 1 )) -fi -if [ $(( jend_rgnl_wide_halo_T6SG%2 )) -eq 1 ]; then - jend_rgnl_wide_halo_T6SG=$(( jend_rgnl_wide_halo_T6SG + 1 )) -fi + if [ $(( jend_of_t7_with_halo_on_t6sg%2 )) -eq 1 ]; then + jend_of_t7_with_halo_on_t6sg=$(( jend_of_t7_with_halo_on_t6sg + 1 )) + fi # #----------------------------------------------------------------------- # @@ -180,7 +361,7 @@ fi # #----------------------------------------------------------------------- # -{ save_shell_opts; set +x; } > /dev/null 2>&1 + { save_shell_opts; set +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -197,119 +378,135 @@ fi # #----------------------------------------------------------------------- # -print_info_msg_verbose "\ + print_info_msg "$VERBOSE" " Original values of the halo width on the tile 6 supergrid and on the tile 7 grid are: - nhw_T6SG = $nhw_T6SG - nhw_T7 = $nhw_T7" + halo_width_on_t6sg = ${halo_width_on_t6sg} + halo_width_on_t7g = ${halo_width_on_t7g}" -nhw_T6SG=$(( $istart_rgnl_T6SG - $istart_rgnl_wide_halo_T6SG )) -nhw_T6=$(( $nhw_T6SG/2 )) -nhw_T7=$(( $nhw_T6*$refine_ratio )) + 
halo_width_on_t6sg=$(( istart_of_t7_on_t6sg - istart_of_t7_with_halo_on_t6sg )) + halo_width_on_t6g=$(( halo_width_on_t6sg/2 )) + halo_width_on_t7g=$(( halo_width_on_t6g*refine_ratio_t6g_to_t7g )) -print_info_msg_verbose "\ + print_info_msg "$VERBOSE" " Values of the halo width on the tile 6 supergrid and on the tile 7 grid AFTER adjustments are: - nhw_T6SG = $nhw_T6SG - nhw_T7 = $nhw_T7" + halo_width_on_t6sg = ${halo_width_on_t6sg} + halo_width_on_t7g = ${halo_width_on_t7g}" # #----------------------------------------------------------------------- # # Calculate the number of cells that the regional domain (without halo) # has in each of the two horizontal directions (say x and y). We denote -# these by nx_T7 and ny_T7, respectively. These will be needed in the -# "shave" steps later below. +# these by nx_of_t7_on_t7g and ny_of_t7_on_t7g, respectively. These +# will be needed in the "shave" steps in the grid generation task of the +# workflow. # #----------------------------------------------------------------------- # -nx_rgnl_T6SG=$(( $iend_rgnl_T6SG - $istart_rgnl_T6SG + 1 )) -nx_rgnl_T6=$(( $nx_rgnl_T6SG/2 )) -nx_T7=$(( $nx_rgnl_T6*$refine_ratio )) + nx_of_t7_on_t6sg=$(( iend_of_t7_on_t6sg - istart_of_t7_on_t6sg + 1 )) + nx_of_t7_on_t6g=$(( nx_of_t7_on_t6sg/2 )) + nx_of_t7_on_t7g=$(( nx_of_t7_on_t6g*refine_ratio_t6g_to_t7g )) -ny_rgnl_T6SG=$(( $jend_rgnl_T6SG - $jstart_rgnl_T6SG + 1 )) -ny_rgnl_T6=$(( $ny_rgnl_T6SG/2 )) -ny_T7=$(( $ny_rgnl_T6*$refine_ratio )) + ny_of_t7_on_t6sg=$(( jend_of_t7_on_t6sg - jstart_of_t7_on_t6sg + 1 )) + ny_of_t7_on_t6g=$(( ny_of_t7_on_t6sg/2 )) + ny_of_t7_on_t7g=$(( ny_of_t7_on_t6g*refine_ratio_t6g_to_t7g )) # # The following are set only for informational purposes. 
# -nx_T6=$RES -ny_T6=$RES -nx_T6SG=$(( $nx_T6*2 )) -ny_T6SG=$(( $ny_T6*2 )) + nx_of_t6_on_t6sg=$(( 2*nx_of_t6_on_t6g )) + ny_of_t6_on_t6sg=$(( 2*ny_of_t6_on_t6g )) -prime_factors_nx_T7=$( factor $nx_T7 | sed -r -e 's/^[0-9]+: (.*)/\1/' ) -prime_factors_ny_T7=$( factor $ny_T7 | sed -r -e 's/^[0-9]+: (.*)/\1/' ) + prime_factors_nx_of_t7_on_t7g=$( factor ${nx_of_t7_on_t7g} | sed -r -e 's/^[0-9]+: (.*)/\1/' ) + prime_factors_ny_of_t7_on_t7g=$( factor ${ny_of_t7_on_t7g} | sed -r -e 's/^[0-9]+: (.*)/\1/' ) -print_info_msg_verbose "\ + print_info_msg "$VERBOSE" " The number of cells in the two horizontal directions (x and y) on the parent tile's (tile 6) grid and supergrid are: - nx_T6 = $nx_T6 - ny_T6 = $ny_T6 - nx_T6SG = $nx_T6SG - ny_T6SG = $ny_T6SG + nx_of_t6_on_t6g = ${nx_of_t6_on_t6g} + ny_of_t6_on_t6g = ${ny_of_t6_on_t6g} + nx_of_t6_on_t6sg = ${nx_of_t6_on_t6sg} + ny_of_t6_on_t6sg = ${ny_of_t6_on_t6sg} The number of cells in the two horizontal directions on the tile 6 grid -and supergrid that the regional domain (tile 7) WITHOUT A HALO encompasses -are: - nx_rgnl_T6 = $nx_rgnl_T6 - ny_rgnl_T6 = $ny_rgnl_T6 - nx_rgnl_T6SG = $nx_rgnl_T6SG - ny_rgnl_T6SG = $ny_rgnl_T6SG +and supergrid that the regional domain (tile 7) WITHOUT A HALO encompas- +ses are: + nx_of_t7_on_t6g = ${nx_of_t7_on_t6g} + ny_of_t7_on_t6g = ${ny_of_t7_on_t6g} + nx_of_t7_on_t6sg = ${nx_of_t7_on_t6sg} + ny_of_t7_on_t6sg = ${ny_of_t7_on_t6sg} -The starting and ending i and j indices on the tile 6 grid used to -generate this regional grid are: - istart_rgnl_T6 = $istart_rgnl_T6 - iend_rgnl_T6 = $iend_rgnl_T6 - jstart_rgnl_T6 = $jstart_rgnl_T6 - jend_rgnl_T6 = $jend_rgnl_T6 +The starting and ending i and j indices on the tile 6 grid used to gene- +rate this regional grid are: + istart_of_t7_on_t6g = ${istart_of_t7_on_t6g} + iend_of_t7_on_t6g = ${iend_of_t7_on_t6g} + jstart_of_t7_on_t6g = ${jstart_of_t7_on_t6g} + jend_of_t7_on_t6g = ${jend_of_t7_on_t6g} -The corresponding starting and ending i and j 
indices on the tile 6 -supergrid are: - istart_rgnl_T6SG = $istart_rgnl_T6SG - iend_rgnl_T6SG = $iend_rgnl_T6SG - jstart_rgnl_T6SG = $jstart_rgnl_T6SG - jend_rgnl_T6SG = $jend_rgnl_T6SG +The corresponding starting and ending i and j indices on the tile 6 su- +pergrid are: + istart_of_t7_on_t6sg = ${istart_of_t7_on_t6sg} + iend_of_t7_on_t6sg = ${iend_of_t7_on_t6sg} + jstart_of_t7_on_t6sg = ${jstart_of_t7_on_t6sg} + jend_of_t7_on_t6sg = ${jend_of_t7_on_t6sg} The refinement ratio (ratio of the number of cells in tile 7 that abut a single cell in tile 6) is: - refine_ratio = $refine_ratio + refine_ratio_t6g_to_t7g = ${refine_ratio_t6g_to_t7g} -The number of cells in the two horizontal directions on the regional -tile's/domain's (tile 7) grid WITHOUT A HALO are: - nx_T7 = $nx_T7 - ny_T7 = $ny_T7 +The number of cells in the two horizontal directions on the regional do- +main's (i.e. tile 7's) grid WITHOUT A HALO are: + nx_of_t7_on_t7g = ${nx_of_t7_on_t7g} + ny_of_t7_on_t7g = ${ny_of_t7_on_t7g} -The prime factors of nx_T7 and ny_T7 are (useful for determining an MPI -task layout, i.e. layout_x and layout_y): - prime_factors_nx_T7: $prime_factors_nx_T7 - prime_factors_ny_T7: $prime_factors_ny_T7" +The prime factors of nx_of_t7_on_t7g and ny_of_t7_on_t7g are (useful for +determining an MPI task layout): + prime_factors_nx_of_t7_on_t7g: ${prime_factors_nx_of_t7_on_t7g} + prime_factors_ny_of_t7_on_t7g: ${prime_factors_ny_of_t7_on_t7g}" # #----------------------------------------------------------------------- # # For informational purposes, calculate the number of cells in each di- -# rection on the regional grid that includes the wide halo (of width -# nhw_T7 cells). We denote these by nx_wide_halo_T7 and ny_wide_halo_- -# T7, respectively. +# rection on the regional grid including the wide halo (of width halo_- +# width_on_t7g cells). We denote these by nx_of_t7_with_halo_on_t7g and +# ny_of_t7_with_halo_on_t7g, respectively. 
# #----------------------------------------------------------------------- # -nx_wide_halo_T6SG=$(( $iend_rgnl_wide_halo_T6SG - $istart_rgnl_wide_halo_T6SG + 1 )) -nx_wide_halo_T6=$(( $nx_wide_halo_T6SG/2 )) -nx_wide_halo_T7=$(( $nx_wide_halo_T6*$refine_ratio )) + nx_of_t7_with_halo_on_t6sg=$(( iend_of_t7_with_halo_on_t6sg - istart_of_t7_with_halo_on_t6sg + 1 )) + nx_of_t7_with_halo_on_t6g=$(( nx_of_t7_with_halo_on_t6sg/2 )) + nx_of_t7_with_halo_on_t7g=$(( nx_of_t7_with_halo_on_t6g*refine_ratio_t6g_to_t7g )) -ny_wide_halo_T6SG=$(( $jend_rgnl_wide_halo_T6SG - $jstart_rgnl_wide_halo_T6SG + 1 )) -ny_wide_halo_T6=$(( $ny_wide_halo_T6SG/2 )) -ny_wide_halo_T7=$(( $ny_wide_halo_T6*$refine_ratio )) + ny_of_t7_with_halo_on_t6sg=$(( jend_of_t7_with_halo_on_t6sg - jstart_of_t7_with_halo_on_t6sg + 1 )) + ny_of_t7_with_halo_on_t6g=$(( ny_of_t7_with_halo_on_t6sg/2 )) + ny_of_t7_with_halo_on_t7g=$(( ny_of_t7_with_halo_on_t6g*refine_ratio_t6g_to_t7g )) -print_info_msg_verbose "\ -nx_wide_halo_T7 = $nx_T7 \ -(istart_rgnl_wide_halo_T6SG = $istart_rgnl_wide_halo_T6SG, \ -iend_rgnl_wide_halo_T6SG = $iend_rgnl_wide_halo_T6SG)" + print_info_msg "$VERBOSE" " +nx_of_t7_with_halo_on_t7g = ${nx_of_t7_with_halo_on_t7g} \ +(istart_of_t7_with_halo_on_t6sg = ${istart_of_t7_with_halo_on_t6sg}, \ +iend_of_t7_with_halo_on_t6sg = ${iend_of_t7_with_halo_on_t6sg})" -print_info_msg_verbose "\ -ny_wide_halo_T7 = $ny_T7 \ -(jstart_rgnl_wide_halo_T6SG = $jstart_rgnl_wide_halo_T6SG, \ -jend_rgnl_wide_halo_T6SG = $jend_rgnl_wide_halo_T6SG)" + print_info_msg "$VERBOSE" " +ny_of_t7_with_halo_on_t7g = ${ny_of_t7_with_halo_on_t7g} \ +(jstart_of_t7_with_halo_on_t6sg = ${jstart_of_t7_with_halo_on_t6sg}, \ +jend_of_t7_with_halo_on_t6sg = ${jend_of_t7_with_halo_on_t6sg})" +# +#----------------------------------------------------------------------- +# +# Set output variables. 
+# +#----------------------------------------------------------------------- +# + eval ${output_varname_lon_of_t7_ctr}="${lon_of_t7_ctr}" + eval ${output_varname_lat_of_t7_ctr}="${lat_of_t7_ctr}" + eval ${output_varname_nx_of_t7_on_t7g}="${nx_of_t7_on_t7g}" + eval ${output_varname_ny_of_t7_on_t7g}="${ny_of_t7_on_t7g}" + eval ${output_varname_halo_width_on_t7g}="${halo_width_on_t7g}" + eval ${output_varname_stretch_factor}="${stretch_factor}" + eval ${output_varname_istart_of_t7_with_halo_on_t6sg}="${istart_of_t7_with_halo_on_t6sg}" + eval ${output_varname_iend_of_t7_with_halo_on_t6sg}="${iend_of_t7_with_halo_on_t6sg}" + eval ${output_varname_jstart_of_t7_with_halo_on_t6sg}="${jstart_of_t7_with_halo_on_t6sg}" + eval ${output_varname_jend_of_t7_with_halo_on_t6sg}="${jend_of_t7_with_halo_on_t6sg}" # #----------------------------------------------------------------------- # @@ -317,7 +514,7 @@ jend_rgnl_wide_halo_T6SG = $jend_rgnl_wide_halo_T6SG)" # #----------------------------------------------------------------------- # -{ restore_shell_opts; } > /dev/null 2>&1 - + { restore_shell_opts; } > /dev/null 2>&1 +} diff --git a/ush/set_gridparams_JPgrid.sh b/ush/set_gridparams_JPgrid.sh index 21755d52ac..c8c52d425d 100644 --- a/ush/set_gridparams_JPgrid.sh +++ b/ush/set_gridparams_JPgrid.sh @@ -1,7 +1,84 @@ -# This file is always sourced by another script (i.e. it's never run in -# its own shell), so there's no need to put the #!/bin/some_shell on the -# first line. - +# +#----------------------------------------------------------------------- +# +# This file defines and then calls a function that sets the parameters +# for a grid that is to be generated using the "JPgrid" grid generation +# method (i.e. GRID_GEN_METHOD set to "JPgrid"). 
+# +#----------------------------------------------------------------------- +# +function set_gridparams_JPgrid() { +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# + { save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Specify the set of valid argument names for this script/function. +# Then process the arguments provided to this script/function (which +# should consist of a set of name-value pairs of the form arg1="value1", +# etc). 
+# +#----------------------------------------------------------------------- +# + local valid_args=( \ +"lon_ctr" \ +"lat_ctr" \ +"nx" \ +"ny" \ +"halo_width" \ +"delx" \ +"dely" \ +"alpha" \ +"kappa" \ +"output_varname_lon_ctr" \ +"output_varname_lat_ctr" \ +"output_varname_nx" \ +"output_varname_ny" \ +"output_varname_halo_width" \ +"output_varname_stretch_factor" \ +"output_varname_del_angle_x_sg" \ +"output_varname_del_angle_y_sg" \ +"output_varname_neg_nx_of_dom_with_wide_halo" \ +"output_varname_neg_ny_of_dom_with_wide_halo" \ + ) + process_args valid_args "$@" +# +#----------------------------------------------------------------------- +# +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. +# +#----------------------------------------------------------------------- +# + print_input_args valid_args # #----------------------------------------------------------------------- # @@ -10,33 +87,88 @@ # #----------------------------------------------------------------------- # -. ${USHDIR}/constants.sh -echo -echo "pi_geom = $pi_geom" -echo "degs_per_radian = $degs_per_radian" -echo "radius_Earth = $radius_Earth" + . ${USHDIR}/constants.sh # #----------------------------------------------------------------------- # +# Declare local variables. 
+# +#----------------------------------------------------------------------- # + local stretch_factor \ + del_angle_x_sg \ + del_angle_y_sg \ + neg_nx_of_dom_with_wide_halo \ + neg_ny_of_dom_with_wide_halo # #----------------------------------------------------------------------- # -del_angle_x_SG=$( bc -l <<< "($delx/(2.0*$radius_Earth))*$degs_per_radian" ) -del_angle_x_SG=$( printf "%0.10f\n" $del_angle_x_SG ) - -del_angle_y_SG=$( bc -l <<< "($dely/(2.0*$radius_Earth))*$degs_per_radian" ) -del_angle_y_SG=$( printf "%0.10f\n" $del_angle_y_SG ) +# For a JPgrid-type grid, the orography filtering is performed by pass- +# ing to the orography filtering the parameters for an "equivalent" glo- +# bal uniform cubed-sphere grid. These are the parameters that a global +# uniform cubed-sphere grid needs to have in order to have a nominal +# grid cell size equal to that of the (average) cell size on the region- +# al grid. These globally-equivalent parameters include a resolution +# (in units of number of cells in each of the two horizontal directions) +# and a stretch factor. The equivalent resolution is calculated in the +# script that generates the grid, and the stretch factor needs to be set +# to 1 because we are considering an equivalent globally UNIFORM grid. +# However, it turns out that with a non-symmetric regional grid (one in +# which nx is not equal to ny), setting stretch_factor to 1 fails be- +# cause the orography filtering program is designed for a global cubed- +# sphere grid and thus assumes that nx and ny for a given tile are equal +# when stretch_factor is exactly equal to 1. <-- Why is this? Seems like symmetry btwn x and y should still hold when the stretch factor is not equal to 1. +# It turns out that the program will work if we set stretch_factor to a +# value that is not exactly 1. This is what we do below. 
+# +#----------------------------------------------------------------------- +# + stretch_factor="0.999" # Check whether the orography program has been fixed so that we can set this to 1... +# +#----------------------------------------------------------------------- +# +# Set parameters needed as inputs to the regional_grid grid generation +# code. +# +#----------------------------------------------------------------------- +# + del_angle_x_sg=$( bc -l <<< "(${delx}/(2.0*${radius_Earth}))*${degs_per_radian}" ) + del_angle_x_sg=$( printf "%0.10f\n" ${del_angle_x_sg} ) -echo "del_angle_x_SG = $del_angle_x_SG" -echo "del_angle_y_SG = $del_angle_y_SG" + del_angle_y_sg=$( bc -l <<< "(${dely}/(2.0*${radius_Earth}))*${degs_per_radian}" ) + del_angle_y_sg=$( printf "%0.10f\n" ${del_angle_y_sg} ) -mns_nx_T7_pls_wide_halo=$( bc -l <<< "-($nx_T7 + 2*$nhw_T7)" ) -mns_nx_T7_pls_wide_halo=$( printf "%.0f\n" $mns_nx_T7_pls_wide_halo ) -echo "mns_nx_T7_pls_wide_halo = $mns_nx_T7_pls_wide_halo" + neg_nx_of_dom_with_wide_halo=$( bc -l <<< "-($nx + 2*${halo_width})" ) + neg_nx_of_dom_with_wide_halo=$( printf "%.0f\n" ${neg_nx_of_dom_with_wide_halo} ) -mns_ny_T7_pls_wide_halo=$( bc -l <<< "-($ny_T7 + 2*$nhw_T7)" ) -mns_ny_T7_pls_wide_halo=$( printf "%.0f\n" $mns_ny_T7_pls_wide_halo ) -echo "mns_ny_T7_pls_wide_halo = $mns_ny_T7_pls_wide_halo" + neg_ny_of_dom_with_wide_halo=$( bc -l <<< "-($ny + 2*${halo_width})" ) + neg_ny_of_dom_with_wide_halo=$( printf "%.0f\n" ${neg_ny_of_dom_with_wide_halo} ) +# +#----------------------------------------------------------------------- +# +# Set output variables. 
+# +#----------------------------------------------------------------------- +# + eval ${output_varname_lon_ctr}="${lon_ctr}" + eval ${output_varname_lat_ctr}="${lat_ctr}" + eval ${output_varname_nx}="${nx}" + eval ${output_varname_ny}="${ny}" + eval ${output_varname_halo_width}="${halo_width}" + eval ${output_varname_stretch_factor}="${stretch_factor}" + eval ${output_varname_del_angle_x_sg}="${del_angle_x_sg}" + eval ${output_varname_del_angle_y_sg}="${del_angle_y_sg}" + eval ${output_varname_neg_nx_of_dom_with_wide_halo}="${neg_nx_of_dom_with_wide_halo}" + eval ${output_varname_neg_ny_of_dom_with_wide_halo}="${neg_ny_of_dom_with_wide_halo}" +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# + { restore_shell_opts; } > /dev/null 2>&1 +} diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh index 766c20d9a9..753115b137 100644 --- a/ush/set_predef_grid_params.sh +++ b/ush/set_predef_grid_params.sh @@ -1,19 +1,47 @@ # #----------------------------------------------------------------------- # +# This file defines and then calls a function that sets grid parameters +# for the specified predefined grid. +# +#----------------------------------------------------------------------- +# +function set_predef_grid_params() { +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). 
+# +#----------------------------------------------------------------------- +# +local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +local scrfunc_fn=$( basename "${scrfunc_fp}" ) +local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# +local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# # Set grid and other parameters according to the value of the predefined -# domain (PREDEF_GRID_NAME). Note that the code will enter this script on- -# ly if PREDEF_GRID_NAME has a valid (and non-empty) value. +# domain (PREDEF_GRID_NAME). Note that the code will enter this script +# only if PREDEF_GRID_NAME has a valid (and non-empty) value. # # The following needs to be updated: # # 1) Reset the experiment title (expt_title). # 2) Reset the grid parameters. # 3) If the write component is to be used (i.e. QUILTING is set to -# "TRUE") and the variable WRTCMP_PARAMS_TEMPLATE_FN containing the -# name of the write-component template file is unset or empty, set -# that filename variable to the appropriate preexisting template -# file. +# "TRUE") and the variable WRTCMP_PARAMS_TMPL_FN containing the name +# of the write-component template file is unset or empty, set that +# filename variable to the appropriate preexisting template file. 
# # For the predefined domains, we determine the starting and ending indi- # ces of the regional grid within tile 6 by specifying margins (in units @@ -60,42 +88,39 @@ case ${PREDEF_GRID_NAME} in The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this predefined domain: PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" - GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" -" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - lon_rgnl_ctr=-62.0 - lat_rgnl_ctr=22.0 + JPgrid_LON_CTR=-62.0 + JPgrid_LAT_CTR=22.0 - delx="3000.0" - dely="3000.0" + JPgrid_DELX="3000.0" + JPgrid_DELY="3000.0" - nx_T7=2880 - ny_T7=1920 + JPgrid_NX=2880 + JPgrid_NY=1920 - nhw_T7=6 + JPgrid_WIDE_HALO_WIDTH=6 - dt_atmos="100" + DT_ATMOS="40" - layout_x="32" - layout_y="24" - blocksize="32" + LAYOUT_X="32" + LAYOUT_Y="24" + BLOCKSIZE="32" if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="32" - WRTCMP_output_grid="lambert_conformal" - WRTCMP_cen_lon="${lon_rgnl_ctr}" - WRTCMP_cen_lat="${lat_rgnl_ctr}" - WRTCMP_stdlat1="${lat_rgnl_ctr}" - WRTCMP_stdlat2="${lat_rgnl_ctr}" - WRTCMP_nx="2937" - WRTCMP_ny="1788" - WRTCMP_lon_lwr_left="-97.83959" - WRTCMP_lat_lwr_left="-5.67929305" - WRTCMP_dx="$delx" - WRTCMP_dy="$dely" + WRTCMP_output_grid="regional_latlon" + WRTCMP_cen_lon="${JPgrid_LON_CTR}" + WRTCMP_cen_lat="25.0" + WRTCMP_lon_lwr_left="-114.5" + WRTCMP_lat_lwr_left="-5.0" + WRTCMP_lon_upr_rght="-9.5" + WRTCMP_lat_upr_rght="55.0" + WRTCMP_dlon="0.03" + WRTCMP_dlat="0.03" fi fi @@ -115,42 +140,39 @@ predefined domain: The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this predefined domain: PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" - GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" -" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - lon_rgnl_ctr=-62.0 - lat_rgnl_ctr=22.0 + JPgrid_LON_CTR=-62.0 + JPgrid_LAT_CTR=22.0 - delx="13000.0" 
- dely="13000.0" + JPgrid_DELX="13000.0" + JPgrid_DELY="13000.0" - nx_T7=665 - ny_T7=444 + JPgrid_NX=665 + JPgrid_NY=444 - nhw_T7=6 + JPgrid_WIDE_HALO_WIDTH=6 - dt_atmos="180" + DT_ATMOS="180" - layout_x="19" - layout_y="12" - blocksize="35" + LAYOUT_X="19" + LAYOUT_Y="12" + BLOCKSIZE="35" if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" - WRTCMP_write_tasks_per_group="12" - WRTCMP_output_grid="lambert_conformal" - WRTCMP_cen_lon="${lon_rgnl_ctr}" - WRTCMP_cen_lat="${lat_rgnl_ctr}" - WRTCMP_stdlat1="${lat_rgnl_ctr}" - WRTCMP_stdlat2="${lat_rgnl_ctr}" - WRTCMP_nx="658" - WRTCMP_ny="412" - WRTCMP_lon_lwr_left="-98.0" - WRTCMP_lat_lwr_left="-5.33" - WRTCMP_dx="$delx" - WRTCMP_dy="$dely" + WRTCMP_write_tasks_per_group="32" + WRTCMP_output_grid="regional_latlon" + WRTCMP_cen_lon="${JPgrid_LON_CTR}" + WRTCMP_cen_lat="25.0" + WRTCMP_lon_lwr_left="-114.5" + WRTCMP_lat_lwr_left="-5.0" + WRTCMP_lon_upr_rght="-9.5" + WRTCMP_lat_upr_rght="55.0" + WRTCMP_dlon="0.13" + WRTCMP_dlat="0.13" fi fi @@ -170,42 +192,39 @@ predefined domain: The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this predefined domain: PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" - GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" -" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - lon_rgnl_ctr=-62.0 - lat_rgnl_ctr=22.0 + JPgrid_LON_CTR=-62.0 + JPgrid_LAT_CTR=22.0 - delx="25000.0" - dely="25000.0" + JPgrid_DELX="25000.0" + JPgrid_DELY="25000.0" - nx_T7=345 - ny_T7=230 + JPgrid_NX=345 + JPgrid_NY=230 - nhw_T7=6 + JPgrid_WIDE_HALO_WIDTH=6 - dt_atmos="300" + DT_ATMOS="300" - layout_x="5" - layout_y="5" - blocksize="6" + LAYOUT_X="5" + LAYOUT_Y="5" + BLOCKSIZE="6" if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" - WRTCMP_write_tasks_per_group="10" - WRTCMP_output_grid="lambert_conformal" - WRTCMP_cen_lon="${lon_rgnl_ctr}" - WRTCMP_cen_lat="${lat_rgnl_ctr}" - WRTCMP_stdlat1="${lat_rgnl_ctr}" - WRTCMP_stdlat2="${lat_rgnl_ctr}" - 
WRTCMP_nx="337" - WRTCMP_ny="210" - WRTCMP_lon_lwr_left="-98.0" - WRTCMP_lat_lwr_left="-4.5" - WRTCMP_dx="$delx" - WRTCMP_dy="$dely" + WRTCMP_write_tasks_per_group="32" + WRTCMP_output_grid="regional_latlon" + WRTCMP_cen_lon="${JPgrid_LON_CTR}" + WRTCMP_cen_lat="25.0" + WRTCMP_lon_lwr_left="-114.5" + WRTCMP_lat_lwr_left="-5.0" + WRTCMP_lon_upr_rght="-9.5" + WRTCMP_lat_upr_rght="55.0" + WRTCMP_dlon="0.25" + WRTCMP_dlat="0.25" fi fi @@ -221,36 +240,38 @@ predefined domain: if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - lon_ctr_T6=-106.0 - lat_ctr_T6=54.0 - stretch_fac=0.63 - RES="384" - refine_ratio=3 + GFDLgrid_LON_T6_CTR=-106.0 + GFDLgrid_LAT_T6_CTR=54.0 + GFDLgrid_STRETCH_FAC=0.63 + GFDLgrid_RES="384" + GFDLgrid_REFINE_RATIO=3 num_margin_cells_T6_left=10 - istart_rgnl_T6=$(( $num_margin_cells_T6_left + 1 )) + GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_left + 1 )) num_margin_cells_T6_right=10 - iend_rgnl_T6=$(( $RES - $num_margin_cells_T6_right )) + GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_right )) num_margin_cells_T6_bottom=10 - jstart_rgnl_T6=$(( $num_margin_cells_T6_bottom + 1 )) + GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_bottom + 1 )) num_margin_cells_T6_top=10 - jend_rgnl_T6=$(( $RES - $num_margin_cells_T6_top )) + GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_top )) + + GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES="FALSE" - dt_atmos="90" + DT_ATMOS="90" - layout_x="14" - layout_y="14" - blocksize="26" + LAYOUT_X="14" + LAYOUT_Y="14" + BLOCKSIZE="26" if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="14" WRTCMP_output_grid="rotated_latlon" - WRTCMP_cen_lon="${lon_rgnl_ctr}" - WRTCMP_cen_lat="${lat_rgnl_ctr}" + WRTCMP_cen_lon="${GFDLgrid_LON_T6_CTR}" + WRTCMP_cen_lat="${GFDLgrid_LAT_T6_CTR}" WRTCMP_lon_lwr_left="-57.9926" WRTCMP_lat_lwr_left="-50.74344" WRTCMP_lon_upr_rght="57.99249" @@ -261,29 +282,29 @@ predefined domain: elif [ 
"${GRID_GEN_METHOD}" = "JPgrid" ]; then - lon_rgnl_ctr=-106.0 - lat_rgnl_ctr=54.0 + JPgrid_LON_CTR=-106.0 + JPgrid_LAT_CTR=54.0 - delx="13000.0" - dely="13000.0" + JPgrid_DELX="13000.0" + JPgrid_DELY="13000.0" - nx_T7=960 - ny_T7=960 + JPgrid_NX=960 + JPgrid_NY=960 - nhw_T7=6 + JPgrid_WIDE_HALO_WIDTH=6 - dt_atmos="90" + DT_ATMOS="90" - layout_x="16" - layout_y="16" - blocksize="30" + LAYOUT_X="16" + LAYOUT_Y="16" + BLOCKSIZE="30" if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="16" WRTCMP_output_grid="rotated_latlon" - WRTCMP_cen_lon="${lon_rgnl_ctr}" - WRTCMP_cen_lat="${lat_rgnl_ctr}" + WRTCMP_cen_lon="${JPgrid_LON_CTR}" + WRTCMP_cen_lat="${JPgrid_LAT_CTR}" WRTCMP_lon_lwr_left="-57.9926" WRTCMP_lat_lwr_left="-50.74344" WRTCMP_lon_upr_rght="57.99249" @@ -309,42 +330,41 @@ predefined domain: The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this predefined domain: PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" - GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" -" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - lon_rgnl_ctr=-97.5 - lat_rgnl_ctr=38.5 + JPgrid_LON_CTR=-97.5 + JPgrid_LAT_CTR=38.5 - delx="25000.0" - dely="25000.0" + JPgrid_DELX="25000.0" + JPgrid_DELY="25000.0" - nx_T7=200 - ny_T7=110 + JPgrid_NX=200 + JPgrid_NY=110 - nhw_T7=6 + JPgrid_WIDE_HALO_WIDTH=6 - dt_atmos="300" + DT_ATMOS="300" - layout_x="2" - layout_y="2" - blocksize="2" + LAYOUT_X="2" + LAYOUT_Y="2" + BLOCKSIZE="2" if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="2" WRTCMP_output_grid="lambert_conformal" - WRTCMP_cen_lon="${lon_rgnl_ctr}" - WRTCMP_cen_lat="${lat_rgnl_ctr}" - WRTCMP_stdlat1="${lat_rgnl_ctr}" - WRTCMP_stdlat2="${lat_rgnl_ctr}" + WRTCMP_cen_lon="${JPgrid_LON_CTR}" + WRTCMP_cen_lat="${JPgrid_LAT_CTR}" + WRTCMP_stdlat1="${JPgrid_LAT_CTR}" + WRTCMP_stdlat2="${JPgrid_LAT_CTR}" WRTCMP_nx="191" WRTCMP_ny="97" 
WRTCMP_lon_lwr_left="-120.72962370" WRTCMP_lat_lwr_left="25.11648583" - WRTCMP_dx="$delx" - WRTCMP_dy="$dely" + WRTCMP_dx="${JPgrid_DELX}" + WRTCMP_dy="${JPgrid_DELY}" fi fi @@ -364,42 +384,41 @@ predefined domain: The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this predefined domain: PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" - GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" -" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - lon_rgnl_ctr=-97.5 - lat_rgnl_ctr=38.5 + JPgrid_LON_CTR=-97.5 + JPgrid_LAT_CTR=38.5 - delx="13000.0" - dely="13000.0" + JPgrid_DELX="13000.0" + JPgrid_DELY="13000.0" - nx_T7=390 - ny_T7=210 + JPgrid_NX=390 + JPgrid_NY=210 - nhw_T7=6 + JPgrid_WIDE_HALO_WIDTH=6 - dt_atmos="180" + DT_ATMOS="180" - layout_x="10" - layout_y="10" - blocksize="39" + LAYOUT_X="10" + LAYOUT_Y="10" + BLOCKSIZE="39" if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="10" WRTCMP_output_grid="lambert_conformal" - WRTCMP_cen_lon="${lon_rgnl_ctr}" - WRTCMP_cen_lat="${lat_rgnl_ctr}" - WRTCMP_stdlat1="${lat_rgnl_ctr}" - WRTCMP_stdlat2="${lat_rgnl_ctr}" + WRTCMP_cen_lon="${JPgrid_LON_CTR}" + WRTCMP_cen_lat="${JPgrid_LAT_CTR}" + WRTCMP_stdlat1="${JPgrid_LAT_CTR}" + WRTCMP_stdlat2="${JPgrid_LAT_CTR}" WRTCMP_nx="383" WRTCMP_ny="195" WRTCMP_lon_lwr_left="-121.58647982" WRTCMP_lat_lwr_left="24.36006861" - WRTCMP_dx="$delx" - WRTCMP_dy="$dely" + WRTCMP_dx="${JPgrid_DELX}" + WRTCMP_dy="${JPgrid_DELY}" fi fi @@ -419,42 +438,41 @@ predefined domain: The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this predefined domain: PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" - GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" -" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - lon_rgnl_ctr=-97.5 - lat_rgnl_ctr=38.5 + JPgrid_LON_CTR=-97.5 + JPgrid_LAT_CTR=38.5 - delx="3000.0" - dely="3000.0" + JPgrid_DELX="3000.0" + 
JPgrid_DELY="3000.0" - nx_T7=1734 - ny_T7=1008 + JPgrid_NX=1734 + JPgrid_NY=1008 - nhw_T7=6 + JPgrid_WIDE_HALO_WIDTH=6 - dt_atmos="100" + DT_ATMOS="40" - layout_x="34" - layout_y="24" - blocksize="34" + LAYOUT_X="34" + LAYOUT_Y="24" + BLOCKSIZE="34" if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="24" WRTCMP_output_grid="lambert_conformal" - WRTCMP_cen_lon="${lon_rgnl_ctr}" - WRTCMP_cen_lat="${lat_rgnl_ctr}" - WRTCMP_stdlat1="${lat_rgnl_ctr}" - WRTCMP_stdlat2="${lat_rgnl_ctr}" + WRTCMP_cen_lon="${JPgrid_LON_CTR}" + WRTCMP_cen_lat="${JPgrid_LAT_CTR}" + WRTCMP_stdlat1="${JPgrid_LAT_CTR}" + WRTCMP_stdlat2="${JPgrid_LAT_CTR}" WRTCMP_nx="1738" WRTCMP_ny="974" WRTCMP_lon_lwr_left="-122.21414225" WRTCMP_lat_lwr_left="22.41403305" - WRTCMP_dx="$delx" - WRTCMP_dy="$dely" + WRTCMP_dx="${JPgrid_DELX}" + WRTCMP_dy="${JPgrid_DELY}" fi fi @@ -462,11 +480,11 @@ predefined domain: # #----------------------------------------------------------------------- # -# EMC's CONUS grid. +# EMC's 3km CONUS grid. # #----------------------------------------------------------------------- # -"EMC_CONUS") +"EMC_CONUS_3km") if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then # Values from an EMC script. 
@@ -504,47 +522,125 @@ predefined domain: #dlat=0.02 - lon_ctr_T6=-97.5 - lat_ctr_T6=38.5 - stretch_fac=1.5 - RES="768" - refine_ratio=3 + GFDLgrid_LON_T6_CTR=-97.5 + GFDLgrid_LAT_T6_CTR=38.5 + GFDLgrid_STRETCH_FAC=1.5 + GFDLgrid_RES="768" + GFDLgrid_REFINE_RATIO=3 num_margin_cells_T6_left=61 - istart_rgnl_T6=$(( $num_margin_cells_T6_left + 1 )) + GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_left + 1 )) num_margin_cells_T6_right=67 - iend_rgnl_T6=$(( $RES - $num_margin_cells_T6_right )) + GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_right )) num_margin_cells_T6_bottom=165 - jstart_rgnl_T6=$(( $num_margin_cells_T6_bottom + 1 )) + GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_bottom + 1 )) num_margin_cells_T6_top=171 - jend_rgnl_T6=$(( $RES - $num_margin_cells_T6_top )) + GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_top )) - dt_atmos="18" + GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES="TRUE" - layout_x="16" - layout_y="72" - write_tasks_per_group="72" - blocksize=32 + DT_ATMOS="18" + LAYOUT_X="16" + LAYOUT_Y="72" + BLOCKSIZE=32 + + if [ "$QUILTING" = "TRUE" ]; then + WRTCMP_write_groups="1" + WRTCMP_write_tasks_per_group=$(( 1*LAYOUT_Y )) + WRTCMP_output_grid="rotated_latlon" + WRTCMP_cen_lon="${GFDLgrid_LON_T6_CTR}" + WRTCMP_cen_lat="${GFDLgrid_LAT_T6_CTR}" +# GSK - The following have not been tested... + WRTCMP_lon_lwr_left="-25.0" + WRTCMP_lat_lwr_left="-15.0" + WRTCMP_lon_upr_rght="25.0" + WRTCMP_lat_upr_rght="15.0" + WRTCMP_dlon="0.02" + WRTCMP_dlat="0.02" + fi elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - lon_rgnl_ctr=-97.5 - lat_rgnl_ctr=38.5 + JPgrid_LON_CTR=-97.5 + JPgrid_LAT_CTR=38.5 + + JPgrid_DELX="3000.0" + JPgrid_DELY="3000.0" + + JPgrid_NX=960 + JPgrid_NY=960 + + JPgrid_WIDE_HALO_WIDTH=6 + + fi + ;; +# +#----------------------------------------------------------------------- +# +# EMC's coarse (?? km) CONUS grid. 
+# +#----------------------------------------------------------------------- +# +"EMC_CONUS_coarse") + + if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then + + GFDLgrid_LON_T6_CTR=-97.5 + GFDLgrid_LAT_T6_CTR=38.5 + GFDLgrid_STRETCH_FAC=1.5 + GFDLgrid_RES="96" + GFDLgrid_REFINE_RATIO=2 + + num_margin_cells_T6_left=9 + GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_left + 1 )) + + num_margin_cells_T6_right=9 + GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_right )) + + num_margin_cells_T6_bottom=9 + GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_bottom + 1 )) + + num_margin_cells_T6_top=9 + GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_top )) - delx="3000.0" - dely="3000.0" + GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES="TRUE" - nx_T7=960 - ny_T7=960 + DT_ATMOS="100" + + LAYOUT_X="6" + LAYOUT_Y="6" + BLOCKSIZE="26" + + if [ "$QUILTING" = "TRUE" ]; then + WRTCMP_write_groups="1" + WRTCMP_write_tasks_per_group=$(( 1*LAYOUT_Y )) + WRTCMP_output_grid="rotated_latlon" + WRTCMP_cen_lon="${GFDLgrid_LON_T6_CTR}" + WRTCMP_cen_lat="${GFDLgrid_LAT_T6_CTR}" +# GSK - The following have not been tested... 
+ WRTCMP_lon_lwr_left="-25.0" + WRTCMP_lat_lwr_left="-15.0" + WRTCMP_lon_upr_rght="25.0" + WRTCMP_lat_upr_rght="15.0" + WRTCMP_dlon="0.24" + WRTCMP_dlat="0.24" + fi + + elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - nhw_T7=6 + print_err_msg_exit "\ +The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this +predefined domain: + PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" fi ;; + # #----------------------------------------------------------------------- # @@ -560,8 +656,7 @@ predefined domain: The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this predefined domain: PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" - GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" -" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" # Values from an EMC script. @@ -597,72 +692,162 @@ predefined domain: #dlon=0.03 #dlat=0.03 - lon_ctr_T6=-153.0 - lat_ctr_T6=61.0 - stretch_fac=1.0 # ??? - RES="768" - refine_ratio=3 # ??? + GFDLgrid_LON_T6_CTR=-153.0 + GFDLgrid_LAT_T6_CTR=61.0 + GFDLgrid_STRETCH_FAC=1.0 # ??? + GFDLgrid_RES="768" + GFDLgrid_REFINE_RATIO=3 # ??? 
num_margin_cells_T6_left=61 - istart_rgnl_T6=$(( $num_margin_cells_T6_left + 1 )) + GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_left + 1 )) num_margin_cells_T6_right=67 - iend_rgnl_T6=$(( $RES - $num_margin_cells_T6_right )) + GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_right )) num_margin_cells_T6_bottom=165 - jstart_rgnl_T6=$(( $num_margin_cells_T6_bottom + 1 )) + GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_bottom + 1 )) num_margin_cells_T6_top=171 - jend_rgnl_T6=$(( $RES - $num_margin_cells_T6_top )) + GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_top )) - dt_atmos="18" + GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES="TRUE" - layout_x="16" - layout_y="48" - write_groups="2" - write_tasks_per_group="24" - blocksize=32 + DT_ATMOS="18" + + LAYOUT_X="16" + LAYOUT_Y="48" + WRTCMP_write_groups="2" + WRTCMP_write_tasks_per_group="24" + BLOCKSIZE=32 elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then print_err_msg_exit "\ The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this +predefined domain: + PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" + + fi + ;; +# +#----------------------------------------------------------------------- +# +# 3-km HRRR Alaska grid. 
+# +#----------------------------------------------------------------------- +# +"GSD_HRRR_AK_3km") + + if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then + + print_err_msg_exit "\ +The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this predefined domain: PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" " + elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then + + JPgrid_LON_CTR=-163.5 #HRRR-AK is -163.5 + JPgrid_LAT_CTR=62.8 #HRRR-AK is 60.8 + + JPgrid_DELX="3000.0" + JPgrid_DELY="3000.0" + + JPgrid_NX=1230 #HRRR-AK is 1300 + JPgrid_NY=850 #HRRR-AK is 920 + + JPgrid_WIDE_HALO_WIDTH=6 + + DT_ATMOS="50" + + LAYOUT_X="30" + LAYOUT_Y="17" + BLOCKSIZE="25" + + if [ "$QUILTING" = "TRUE" ]; then + WRTCMP_write_groups="1" + WRTCMP_write_tasks_per_group="2" + WRTCMP_output_grid="lambert_conformal" + WRTCMP_cen_lon="${JPgrid_LON_CTR}" + WRTCMP_cen_lat="${JPgrid_LAT_CTR}" + WRTCMP_stdlat1="${JPgrid_LAT_CTR}" + WRTCMP_stdlat2="${JPgrid_LAT_CTR}" + WRTCMP_nx="1169" + WRTCMP_ny="762" + WRTCMP_lon_lwr_left="172.0" + WRTCMP_lat_lwr_left="49.0" + WRTCMP_dx="${JPgrid_DELX}" + WRTCMP_dy="${JPgrid_DELY}" + fi fi ;; # -esac -# #----------------------------------------------------------------------- # -# Set the name of the template file containing placeholder values for -# write-component parameters (if this file name is not already set). -# This file will be appended to the model_configure file, and place- -# holder values will be replaced with actual ones. +# 50-km HRRR Alaska grid. 
# #----------------------------------------------------------------------- # -if [ "$QUILTING" = "TRUE" ]; then +"GSD_HRRR_AK_50km") + + if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then + + print_err_msg_exit "\ +The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this +predefined domain: + PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" +" + elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then + + JPgrid_LON_CTR=-163.5 + JPgrid_LAT_CTR=62.8 + + JPgrid_DELX="50000.0" + JPgrid_DELY="50000.0" + + JPgrid_NX=74 + JPgrid_NY=51 + + JPgrid_WIDE_HALO_WIDTH=6 + + DT_ATMOS="600" + + LAYOUT_X="2" + LAYOUT_Y="3" + BLOCKSIZE="37" + + if [ "$QUILTING" = "TRUE" ]; then + WRTCMP_write_groups="1" + WRTCMP_write_tasks_per_group="1" + WRTCMP_output_grid="lambert_conformal" + WRTCMP_cen_lon="${JPgrid_LON_CTR}" + WRTCMP_cen_lat="${JPgrid_LAT_CTR}" + WRTCMP_stdlat1="${JPgrid_LAT_CTR}" + WRTCMP_stdlat2="${JPgrid_LAT_CTR}" + WRTCMP_nx="70" + WRTCMP_ny="45" + WRTCMP_lon_lwr_left="172.0" + WRTCMP_lat_lwr_left="49.0" + WRTCMP_dx="${JPgrid_DELX}" + WRTCMP_dy="${JPgrid_DELY}" + fi + + fi + ;; +# +esac + +} # -# First, make sure that WRTCMP_output_grid is set to a valid value. +#----------------------------------------------------------------------- # - iselementof "$WRTCMP_output_grid" valid_vals_WRTCMP_output_grid || { \ - valid_vals_WRTCMP_output_grid_str=$(printf "\"%s\" " "${valid_vals_WRTCMP_output_grid[@]}"); - print_err_msg_exit "\ -The write-component coordinate system specified in WRTCMP_output_grid is -not supported: - WRTCMP_output_grid = \"$WRTCMP_output_grid\" -WRTCMP_output_grid must be set to one of the following: - $valid_vals_WRTCMP_output_grid_str -"; } +# Call the function defined above. # -# Now set the name of the write-component template file. 
+#----------------------------------------------------------------------- # - WRTCMP_PARAMS_TEMPLATE_FN=${WRTCMP_PARAMS_TEMPLATE_FN:-"wrtcmp_${WRTCMP_output_grid}"} - -fi +set_predef_grid_params diff --git a/ush/setup.sh b/ush/setup.sh index 89d35ac20d..1d418b659f 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -1,38 +1,65 @@ -#!/bin/sh -l - # #----------------------------------------------------------------------- # -# This script sets parameters needed by the various scripts that are -# called by the rocoto workflow. This secondary set of parameters is -# calculated using the primary set of user-defined parameters in the -# default and local workflow/experiment configuration scripts (whose -# file names are defined below). This script then saves both sets of -# parameters in a variable-definitions script in the run directory that -# will be sourced by the various scripts called by the workflow. +# This file defines and then calls a function that sets a secondary set +# of parameters needed by the various scripts that are called by the +# FV3SAR rocoto community workflow. This secondary set of parameters is +# calculated using the primary set of user-defined parameters in the de- +# fault and custom experiment/workflow configuration scripts (whose file +# names are defined below). This script then saves both sets of parame- +# ters in a global variable definitions file (really a bash script) in +# the experiment directory. This file then gets sourced by the various +# scripts called by the tasks in the workflow. # #----------------------------------------------------------------------- # - - +function setup() { +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). 
+# +#----------------------------------------------------------------------- +# +local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +local scrfunc_fn=$( basename "${scrfunc_fp}" ) +local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# +local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# +cd_vrfy ${scrfunc_dir} # #----------------------------------------------------------------------- # -# Set the current script's name and the directory in which it is loca- -# ted. +# Source bash utility functions. # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -script_dir=$( dirname ${BASH_SOURCE[0]} ) +. ./source_util_funcs.sh # #----------------------------------------------------------------------- # -# Source function definition files. +# Source other necessary files. # #----------------------------------------------------------------------- # -. ./source_funcs.sh +. ./set_gridparams_GFDLgrid.sh +. ./set_gridparams_JPgrid.sh +. 
./link_fix.sh # #----------------------------------------------------------------------- # @@ -41,7 +68,7 @@ script_dir=$( dirname ${BASH_SOURCE[0]} ) # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -50,47 +77,44 @@ script_dir=$( dirname ${BASH_SOURCE[0]} ) # #----------------------------------------------------------------------- # -DEFAULT_CONFIG_FN="config_defaults.sh" -LOCAL_CONFIG_FN="config.sh" +DEFAULT_EXPT_CONFIG_FN="config_defaults.sh" +EXPT_CONFIG_FN="config.sh" # #----------------------------------------------------------------------- # -# Source the configuration script containing default values of experi- -# ment variables. +# Source the default configuration file containing default values for +# the experiment/workflow variables. # #----------------------------------------------------------------------- # -. ./${DEFAULT_CONFIG_FN} +. ./${DEFAULT_EXPT_CONFIG_FN} # #----------------------------------------------------------------------- # -# If a local configuration script exists, source that as well. Here, by -# "local", we mean one that contains variable settings that are relevant -# only to the local environment (e.g. a directory setting that applies -# only to the current user on the current machine). Note that this lo- -# cal script is not tracked by the repository, whereas the default con- -# figuration script sourced above is tracked. Any variable settings in -# the local script will override the ones in the default script. The -# purpose of having a local configuration script is to avoid having to -# make changes to the default configuration script that are only appli- -# cable to one user, one machine, etc. +# If a user-specified configuration file exists, source it. 
This file +# contains user-specified values for a subset of the experiment/workflow +# variables that override their default values. Note that the user- +# specified configuration file is not tracked by the repository, whereas +# the default configuration file is tracked. # #----------------------------------------------------------------------- # -if [ -f "$LOCAL_CONFIG_FN" ]; then +if [ -f "${EXPT_CONFIG_FN}" ]; then # -# We require that the variables being set in the local configuration -# script have counterparts in the default configuration script. This is -# so that we do not accidentally introduce new variables in the local -# script without also officially introducing them in the default script. -# Thus, before sourcing the local configuration script, we check for -# this. +# We require that the variables being set in the user-specified configu- +# ration file have counterparts in the default configuration file. This +# is so that we do not introduce new variables in the user-specified +# configuration file without also officially introducing them in the de- +# fault configuration file. Thus, before sourcing the user-specified +# configuration file, we check that all variables in the user-specified +# configuration file are also assigned default values in the default +# configuration file. # . ./compare_config_scripts.sh # -# Now source the local configuration script. +# Now source the user-specified configuration file. # - . ./$LOCAL_CONFIG_FN + . 
./${EXPT_CONFIG_FN} # fi # @@ -108,14 +132,7 @@ fi # #----------------------------------------------------------------------- # -iselementof "$RUN_ENVIR" valid_vals_RUN_ENVIR || { \ -valid_vals_RUN_ENVIR_str=$(printf "\"%s\" " "${valid_vals_RUN_ENVIR[@]}"); -print_err_msg_exit "${script_name}" "\ -Value specified in RUN_ENVIR is not supported: - RUN_ENVIR = \"$RUN_ENVIR\" -RUN_ENVIR must be set to one of the following: - $valid_vals_RUN_ENVIR_str -"; } +check_var_valid_value "RUN_ENVIR" "valid_vals_RUN_ENVIR" # #----------------------------------------------------------------------- # @@ -123,14 +140,7 @@ RUN_ENVIR must be set to one of the following: # #----------------------------------------------------------------------- # -iselementof "$VERBOSE" valid_vals_VERBOSE || { \ -valid_vals_VERBOSE_str=$(printf "\"%s\" " "${valid_vals_VERBOSE[@]}"); -print_err_msg_exit "${script_name}" "\ -Value specified in VERBOSE is not supported: - VERBOSE = \"$VERBOSE\" -VERBOSE must be set to one of the following: - $valid_vals_VERBOSE_str -"; } +check_var_valid_value "VERBOSE" "valid_vals_VERBOSE" # # Set VERBOSE to either "TRUE" or "FALSE" so we don't have to consider # other valid values later on. @@ -146,18 +156,31 @@ fi # #----------------------------------------------------------------------- # +# Make sure that USE_CRON_TO_RELAUNCH is set to a valid value. +# +#----------------------------------------------------------------------- +# +check_var_valid_value "USE_CRON_TO_RELAUNCH" "valid_vals_USE_CRON_TO_RELAUNCH" +# +# Set USE_CRON_TO_RELAUNCH to either "TRUE" or "FALSE" so we don't have to consider +# other valid values later on. 
+# +USE_CRON_TO_RELAUNCH=${USE_CRON_TO_RELAUNCH^^} +if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ] || \ + [ "${USE_CRON_TO_RELAUNCH}" = "YES" ]; then + USE_CRON_TO_RELAUNCH="TRUE" +elif [ "${USE_CRON_TO_RELAUNCH}" = "FALSE" ] || \ + [ "${USE_CRON_TO_RELAUNCH}" = "NO" ]; then + USE_CRON_TO_RELAUNCH="FALSE" +fi +# +#----------------------------------------------------------------------- +# # Make sure that RUN_TASK_MAKE_GRID is set to a valid value. # #----------------------------------------------------------------------- # -iselementof "${RUN_TASK_MAKE_GRID}" valid_vals_RUN_TASK_MAKE_GRID || { \ -valid_vals_RUN_TASK_MAKE_GRID_str=$(printf "\"%s\" " "${valid_vals_RUN_TASK_MAKE_GRID[@]}"); -print_err_msg_exit "${script_name}" "\ -Value specified in RUN_TASK_MAKE_GRID is not supported: - RUN_TASK_MAKE_GRID = \"${RUN_TASK_MAKE_GRID}\" -RUN_TASK_MAKE_GRID must be set to one of the following: - ${valid_vals_RUN_TASK_MAKE_GRID_str} -"; } +check_var_valid_value "RUN_TASK_MAKE_GRID" "valid_vals_RUN_TASK_MAKE_GRID" # # Set RUN_TASK_MAKE_GRID to either "TRUE" or "FALSE" so we don't have to # consider other valid values later on. @@ -177,14 +200,8 @@ fi # #----------------------------------------------------------------------- # -iselementof "$RUN_TASK_MAKE_SFC_CLIMO" valid_vals_RUN_TASK_MAKE_SFC_CLIMO || { \ -valid_vals_RUN_TASK_MAKE_SFC_CLIMO_str=$(printf "\"%s\" " "${valid_vals_RUN_TASK_MAKE_SFC_CLIMO[@]}"); -print_err_msg_exit "${script_name}" "\ -Value specified in RUN_TASK_MAKE_SFC_CLIMO is not supported: - RUN_TASK_MAKE_SFC_CLIMO = \"$RUN_TASK_MAKE_SFC_CLIMO\" -RUN_TASK_MAKE_SFC_CLIMO must be set to one of the following: - $valid_vals_RUN_TASK_MAKE_SFC_CLIMO_str -"; } +check_var_valid_value \ + "RUN_TASK_MAKE_SFC_CLIMO" "valid_vals_RUN_TASK_MAKE_SFC_CLIMO" # # Set RUN_TASK_MAKE_SFC_CLIMO to either "TRUE" or "FALSE" so we don't # have to consider other valid values later on. @@ -204,11 +221,10 @@ fi # if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" ] && \ [ ! 
-d "${SFC_CLIMO_DIR}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The directory (SFC_CLIMO_DIR) that should contain the pre-generated sur- face climatology files does not exist: - SFC_CLIMO_DIR = \"${SFC_CLIMO_DIR}\" -" + SFC_CLIMO_DIR = \"${SFC_CLIMO_DIR}\"" fi # # If RUN_TASK_MAKE_SFC_CLIMO is set to "TRUE" and the variable specify- @@ -229,15 +245,7 @@ fi #----------------------------------------------------------------------- # MACHINE=$( printf "%s" "$MACHINE" | sed -e 's/\(.*\)/\U\1/' ) - -iselementof "$MACHINE" valid_vals_MACHINE || { \ -valid_vals_MACHINE_str=$(printf "\"%s\" " "${valid_vals_MACHINE[@]}"); -print_err_msg_exit "${script_name}" "\ -Machine specified in MACHINE is not supported: - MACHINE = \"$MACHINE\" -MACHINE must be set to one of the following: - $valid_vals_MACHINE_str -"; } +check_var_valid_value "MACHINE" "valid_vals_MACHINE" # #----------------------------------------------------------------------- # @@ -251,11 +259,11 @@ case $MACHINE in # "WCOSS_C") # - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Don't know how to set several parameters on MACHINE=\"$MACHINE\". Please specify the correct parameters for this machine in the setup script. Then remove this message and rerun." - ncores_per_node="" + NCORES_PER_NODE="" SCHED="" QUEUE_DEFAULT=${QUEUE_DEFAULT:-""} QUEUE_HPSS=${QUEUE_HPSS:-""} @@ -264,12 +272,12 @@ Then remove this message and rerun." # "WCOSS") # - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Don't know how to set several parameters on MACHINE=\"$MACHINE\". Please specify the correct parameters for this machine in the setup script. Then remove this message and rerun." - ncores_per_node="" + NCORES_PER_NODE="" SCHED="" QUEUE_DEFAULT=${QUEUE_DEFAULT:-""} QUEUE_HPSS=${QUEUE_HPSS:-""} @@ -278,7 +286,7 @@ Then remove this message and rerun." 
# "THEIA") # - ncores_per_node=24 + NCORES_PER_NODE=24 SCHED="slurm" QUEUE_DEFAULT=${QUEUE_DEFAULT:-"batch"} QUEUE_HPSS=${QUEUE_HPSS:-"service"} @@ -287,7 +295,7 @@ Then remove this message and rerun." # "HERA") # - ncores_per_node=24 + NCORES_PER_NODE=24 SCHED="slurm" QUEUE_DEFAULT=${QUEUE_DEFAULT:-"batch"} QUEUE_HPSS=${QUEUE_HPSS:-"service"} @@ -296,7 +304,7 @@ Then remove this message and rerun." # "JET") # - ncores_per_node=24 + NCORES_PER_NODE=24 SCHED="slurm" QUEUE_DEFAULT=${QUEUE_DEFAULT:-"batch"} QUEUE_HPSS=${QUEUE_HPSS:-"service"} @@ -305,7 +313,7 @@ Then remove this message and rerun." # "ODIN") # - ncores_per_node=24 + NCORES_PER_NODE=24 SCHED="slurm" QUEUE_DEFAULT=${QUEUE_DEFAULT:-""} QUEUE_HPSS=${QUEUE_HPSS:-""} @@ -314,12 +322,12 @@ Then remove this message and rerun." # "CHEYENNE") # - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Don't know how to set several parameters on MACHINE=\"$MACHINE\". Please specify the correct parameters for this machine in the setup script. Then remove this message and rerun." - ncores_per_node= + NCORES_PER_NODE= SCHED="" QUEUE_DEFAULT=${QUEUE_DEFAULT:-""} QUEUE_HPSS=${QUEUE_HPSS:-""} @@ -329,32 +337,76 @@ esac # #----------------------------------------------------------------------- # -# Set the grid type (gtype). In general, in the FV3 code, this can take +# Verify that the ACCOUNT variable is not empty. If it is, print out an +# error message and exit. +# +#----------------------------------------------------------------------- +# +if [ -z "$ACCOUNT" ]; then + print_err_msg_exit "\ +The variable ACCOUNT cannot be empty: + ACCOUNT = \"$ACCOUNT\"" +fi +# +#----------------------------------------------------------------------- +# +# Set the grid type (GTYPE). In general, in the FV3 code, this can take # on one of the following values: "global", "stretch", "nest", and "re- # gional". The first three values are for various configurations of a # global grid, while the last one is for a regional grid. 
Since here we -# are only interested in a regional grid, gtype must be set to "region- +# are only interested in a regional grid, GTYPE must be set to "region- # al". # #----------------------------------------------------------------------- # -gtype="regional" +GTYPE="regional" TILE_RGNL="7" # #----------------------------------------------------------------------- # -# Make sure that gtype is set to a valid value. +# Make sure that GTYPE is set to a valid value. +# +#----------------------------------------------------------------------- +# +check_var_valid_value "GTYPE" "valid_vals_GTYPE" +# +#----------------------------------------------------------------------- +# +# If running in NCO mode, a valid EMC grid must be specified. Make sure +# EMC_GRID_NAME is set to a valid value. +# +# Note: It is probably best to eventually eliminate EMC_GRID_NAME as a +# user-specified variable and just go with PREDEF_GRID_NAME. # #----------------------------------------------------------------------- # -iselementof "$gtype" valid_vals_gtype || { \ -valid_vals_gtype_str=$(printf "\"%s\" " "${valid_vals_gtype[@]}"); -print_err_msg_exit "${script_name}" "\ -Value specified in gtype is not supported: - gtype = \"$gtype\" -gtype must be set to one of the following: - $valid_vals_gtype_str -"; } +if [ "${RUN_ENVIR}" = "nco" ]; then + err_msg="\ +The EMC grid specified in EMC_GRID_NAME is not supported: + EMC_GRID_NAME = \"${EMC_GRID_NAME}\"" + check_var_valid_value \ + "EMC_GRID_NAME" "valid_vals_EMC_GRID_NAME" "${err_msg}" +fi +# +# Map the specified EMC grid to one of the predefined grids. 
+# +case "${EMC_GRID_NAME}" in + "ak") + PREDEF_GRID_NAME="EMC_AK" + ;; + "conus") + PREDEF_GRID_NAME="EMC_CONUS_3km" + ;; + "conus_c96") + PREDEF_GRID_NAME="EMC_CONUS_coarse" + ;; + "conus_orig"|"guam"|"hi"|"pr") + print_err_msg_exit "\ +A predefined grid (PREDEF_GRID_NAME) has not yet been defined for this +EMC grid (EMC_GRID_NAME): + EMC_GRID_NAME = \"${EMC_GRID_NAME}\"" + ;; +esac # #----------------------------------------------------------------------- # @@ -363,19 +415,13 @@ gtype must be set to one of the following: #----------------------------------------------------------------------- # if [ ! -z ${PREDEF_GRID_NAME} ]; then - iselementof "$PREDEF_GRID_NAME" valid_vals_PREDEF_GRID_NAME || { \ - valid_vals_PREDEF_GRID_NAME_str=$(printf "\"%s\" " "${valid_vals_PREDEF_GRID_NAME[@]}"); - print_err_msg_exit "${script_name}" "\ -The predefined regional domain specified in PREDEF_GRID_NAME is not sup- + err_msg="\ +The predefined regional grid specified in PREDEF_GRID_NAME is not sup- ported: - PREDEF_GRID_NAME = \"$PREDEF_GRID_NAME\" -PREDEF_GRID_NAME must be set either to an empty string or to one of the -following: - $valid_vals_PREDEF_GRID_NAME_str -"; } + PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\"" + check_var_valid_value \ + "PREDEF_GRID_NAME" "valid_vals_PREDEF_GRID_NAME" "${err_msg}" fi - - # #----------------------------------------------------------------------- # @@ -383,14 +429,8 @@ fi # #----------------------------------------------------------------------- # -iselementof "${PREEXISTING_DIR_METHOD}" valid_vals_PREEXISTING_DIR_METHOD || { \ -valid_vals_PREEXISTING_DIR_METHOD_str=$(printf "\"%s\" " "${valid_vals_PREEXISTING_DIR_METHOD[@]}"); -print_err_msg_exit "${script_name}" "\ -Value specified in PREEXISTING_DIR_METHOD is not supported: - PREEXISTING_DIR_METHOD = \"${PREEXISTING_DIR_METHOD}\" -PREEXISTING_DIR_METHOD must be set to one of the following: - $valid_vals_PREEXISTING_DIR_METHOD_str -"; } +check_var_valid_value \ + 
"PREEXISTING_DIR_METHOD" "valid_vals_PREEXISTING_DIR_METHOD" # #----------------------------------------------------------------------- # @@ -398,14 +438,7 @@ PREEXISTING_DIR_METHOD must be set to one of the following: # #----------------------------------------------------------------------- # -iselementof "${USE_CCPP}" valid_vals_USE_CCPP || { \ -valid_vals_USE_CCPP_str=$(printf "\"%s\" " "${valid_vals_USE_CCPP[@]}"); -print_err_msg_exit "${script_name}" "\ -The value specified for the USE_CCPP flag is not supported: - USE_CCPP = \"${USE_CCPP}\" -USE_CCPP must be set to one of the following: - $valid_vals_CCPP_str -"; } +check_var_valid_value "USE_CCPP" "valid_vals_USE_CCPP" # # Set USE_CCPP to either "TRUE" or "FALSE" so we don't have to consider # other valid values later on. @@ -426,34 +459,38 @@ fi # #----------------------------------------------------------------------- # -if [ "${USE_CCPP}" = "TRUE" ]; then - - if [ ! -z ${CCPP_PHYS_SUITE} ]; then - iselementof "${CCPP_PHYS_SUITE}" valid_vals_CCPP_PHYS_SUITE || { \ - valid_vals_CCPP_PHYS_SUITE_str=$(printf "\"%s\" " "${valid_vals_CCPP_PHYS_SUITE[@]}"); - print_err_msg_exit "${script_name}" "\ +if [ "${USE_CCPP}" = "TRUE" ] && [ ! -z ${CCPP_PHYS_SUITE} ]; then + err_msg="\ The CCPP physics suite specified in CCPP_PHYS_SUITE is not supported: - CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\" -CCPP_PHYS_SUITE must be set to one of the following: - $valid_vals_CCPP_PHYS_SUITE_str - "; } - fi - + CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\"" + check_var_valid_value \ + "CCPP_PHYS_SUITE" "valid_vals_CCPP_PHYS_SUITE" "${err_msg}" fi - # #----------------------------------------------------------------------- # -# Do not allow the option of running with RAP or HRRR external model -# data and GFS physics. This option is currently untested. +# If using CCPP with the GFS_2017_gfdlmp physics suite, only allow +# "GSMGFS" and "FV3GFS" as the external models for ICs and LBCs. 
# #----------------------------------------------------------------------- # -if [ "$EXTRN_MDL_NAME_ICS" = "HRRRX" -o "$EXTRN_MDL_NAME_LBCS" = "RAPX" ] && \ - [ "${CCPP_PHYS_SUITE}" = "GFS" ]; then - print_err_msg_exit "${script_name}" "\ -Using $EXTRN_MDL_NAME_ICS external model data and ${CCPP_PHYS_SUITE} physics through CCPP is -untested and not currently an option in the community SAR workflow." +if [ "${USE_CCPP}" = "TRUE" ] && \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then + + if [ "${EXTRN_MDL_NAME_ICS}" != "GSMGFS" -a \ + "${EXTRN_MDL_NAME_ICS}" != "FV3GFS" ] || \ + [ "${EXTRN_MDL_NAME_LBCS}" != "GSMGFS" -a \ + "${EXTRN_MDL_NAME_LBCS}" != "FV3GFS" ]; then + print_info_msg "$VERBOSE" " +The following combination of physics suite and external model(s) for ICs +and LBCs is not allowed: + CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\" + EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" + EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" +For this physics suite, the only external models that the workflow cur- +rently allows are \"GSMGFS\" and \"FV3GFS\"." + fi + fi # #----------------------------------------------------------------------- @@ -463,24 +500,26 @@ fi # #----------------------------------------------------------------------- # -DATE_OR_NULL=$( printf "%s" "$DATE_FIRST_CYCL" | sed -n -r -e "s/^([0-9]{8})$/\1/p" ) +DATE_OR_NULL=$( printf "%s" "${DATE_FIRST_CYCL}" | \ + sed -n -r -e "s/^([0-9]{8})$/\1/p" ) if [ -z "${DATE_OR_NULL}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ DATE_FIRST_CYCL must be a string consisting of exactly 8 digits of the form \"YYYYMMDD\", where YYYY is the 4-digit year, MM is the 2-digit month, DD is the 2-digit day-of-month, and HH is the 2-digit hour-of- day. 
- DATE_FIRST_CYCL = \"$DATE_FIRST_CYCL\"" + DATE_FIRST_CYCL = \"${DATE_FIRST_CYCL}\"" fi -DATE_OR_NULL=$( printf "%s" "$DATE_LAST_CYCL" | sed -n -r -e "s/^([0-9]{8})$/\1/p" ) +DATE_OR_NULL=$( printf "%s" "${DATE_LAST_CYCL}" | \ + sed -n -r -e "s/^([0-9]{8})$/\1/p" ) if [ -z "${DATE_OR_NULL}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ DATE_LAST_CYCL must be a string consisting of exactly 8 digits of the form \"YYYYMMDD\", where YYYY is the 4-digit year, MM is the 2-digit month, DD is the 2-digit day-of-month, and HH is the 2-digit hour-of- day. - DATE_LAST_CYCL = \"$DATE_LAST_CYCL\"" + DATE_LAST_CYCL = \"${DATE_LAST_CYCL}\"" fi # #----------------------------------------------------------------------- @@ -499,7 +538,7 @@ for CYCL in "${CYCL_HRS[@]}"; do CYCL_OR_NULL=$( printf "%s" "$CYCL" | sed -n -r -e "s/^([0-9]{2})$/\1/p" ) if [ -z "${CYCL_OR_NULL}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Each element of CYCL_HRS must be a string consisting of exactly 2 digits (including a leading \"0\", if necessary) specifying an hour-of-day. Ele- ment #$i of CYCL_HRS (where the index of the first element is 0) does not @@ -508,8 +547,9 @@ have this form: CYCL_HRS[$i] = \"${CYCL_HRS[$i]}\"" fi - if [ "${CYCL_OR_NULL}" -lt "0" ] || [ "${CYCL_OR_NULL}" -gt "23" ]; then - print_err_msg_exit "${script_name}" "\ + if [ "${CYCL_OR_NULL}" -lt "0" ] || \ + [ "${CYCL_OR_NULL}" -gt "23" ]; then + print_err_msg_exit "\ Each element of CYCL_HRS must be an integer between \"00\" and \"23\", in- clusive (including a leading \"0\", if necessary), specifying an hour-of- day. Element #$i of CYCL_HRS (where the index of the first element is 0) @@ -563,7 +603,7 @@ HH_FIRST_CYCL=${CYCL_HRS[0]} # Directory in which templates of various FV3SAR input files are locat- # ed. # -# NEMSfv3gfs_DIR: +# UFS_WTHR_MDL_DIR: # Directory in which the (NEMS-enabled) FV3SAR application is located. 
# This directory includes subdirectories for FV3, NEMS, and FMS. If # USE_CCPP is set to "TRUE", it also includes a subdirectory for CCPP. @@ -589,35 +629,22 @@ HH_FIRST_CYCL=${CYCL_HRS[0]} # # The current script should be located in the ush subdirectory of the # workflow directory. Thus, the workflow directory is the one above the -# directory of the current script. Get the path to this directory and -# save it in HOMErrfs. +# directory of the current script. Get the path to this latter directo- +# ry and save it in HOMErrfs. # -HOMErrfs=${script_dir%/*} +HOMErrfs=${scrfunc_dir%/*} USHDIR="$HOMErrfs/ush" SCRIPTSDIR="$HOMErrfs/scripts" JOBSDIR="$HOMErrfs/jobs" SORCDIR="$HOMErrfs/sorc" PARMDIR="$HOMErrfs/parm" +MODULES_DIR="$HOMErrfs/modulefiles" EXECDIR="$HOMErrfs/exec" FIXrrfs="$HOMErrfs/fix" FIXupp="$FIXrrfs/fix_upp" FIXgsd="$FIXrrfs/fix_gsd" TEMPLATE_DIR="$USHDIR/templates" -UFS_UTILS_DIR="$SORCDIR/UFS_UTILS_develop" -NEMSfv3gfs_DIR="$SORCDIR/NEMSfv3gfs" -# -# Make sure that the NEMSfv3gfs_DIR directory exists. -# -if [ ! -d "$NEMSfv3gfs_DIR" ]; then - print_err_msg_exit "${script_name}" "\ -The NEMSfv3gfs directory specified by NEMSfv3gfs_DIR that should contain -the FV3 source code does not exist: - NEMSfv3gfs_DIR = \"$NEMSfv3gfs_DIR\" -Please clone the NEMSfv3gfs repository in this directory, build the FV3 -executable, and then rerun the workflow." -fi - case $MACHINE in @@ -657,16 +684,103 @@ case $MACHINE in ;; *) - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Directories have not been specified for this machine: - MACHINE = \"$MACHINE\" -" + MACHINE = \"$MACHINE\"" ;; esac # #----------------------------------------------------------------------- # +# Set the base directories in which codes obtained from external reposi- +# tories (using the manage_externals tool) are placed. Obtain the rela- +# tive paths to these directories by reading them in from the manage_ex- +# ternals configuration file. 
(Note that these are relative to the lo- +cation of the configuration file.)  Then form the full paths to these +directories.  Finally, make sure that each of these directories actu- +ally exists. +# +#----------------------------------------------------------------------- +# +mng_extrns_cfg_fn="$HOMErrfs/Externals.cfg" +property_name="local_path" +# +# Get the base directory of the FV3 forecast model code. +# +external_name="ufs_weather_model" +UFS_WTHR_MDL_DIR=$( \ +get_manage_externals_config_property \ +"${mng_extrns_cfg_fn}" "${external_name}" "${property_name}" ) || \ +print_err_msg_exit "\ +Call to function get_manage_externals_config_property failed." + +UFS_WTHR_MDL_DIR="$HOMErrfs/${UFS_WTHR_MDL_DIR}" +if [ ! -d "${UFS_WTHR_MDL_DIR}" ]; then + print_err_msg_exit "\ +The base directory in which the FV3 source code should be located +(UFS_WTHR_MDL_DIR) does not exist: + UFS_WTHR_MDL_DIR = \"${UFS_WTHR_MDL_DIR}\" +Please clone the external repository containing the code in this direct- +ory, build the executable, and then rerun the workflow." +fi +# +# Get the base directory of the UFS_UTILS codes (except for chgres). +# +external_name="ufs_utils" +UFS_UTILS_DIR=$( \ +get_manage_externals_config_property \ +"${mng_extrns_cfg_fn}" "${external_name}" "${property_name}" ) || \ +print_err_msg_exit "\ +Call to function get_manage_externals_config_property failed." + +UFS_UTILS_DIR="$HOMErrfs/${UFS_UTILS_DIR}" +if [ ! -d "${UFS_UTILS_DIR}" ]; then + print_err_msg_exit "\ +The base directory in which the UFS utilities source codes should be lo- +cated (UFS_UTILS_DIR) does not exist: + UFS_UTILS_DIR = \"${UFS_UTILS_DIR}\" +Please clone the external repository containing the code in this direct- +ory, build the executables, and then rerun the workflow." +fi +# +# Get the base directory of the chgres code. 
+# +external_name="ufs_utils_chgres" +CHGRES_DIR=$( \ +get_manage_externals_config_property \ +"${mng_extrns_cfg_fn}" "${external_name}" "${property_name}" ) || \ +print_err_msg_exit "\ +Call to function get_manage_externals_config_property failed." + +CHGRES_DIR="$HOMErrfs/${CHGRES_DIR}" +if [ ! -d "${CHGRES_DIR}" ]; then + print_err_msg_exit "\ +The base directory in which the chgres source code should be located +(CHGRES_DIR) does not exist: + CHGRES_DIR = \"${CHGRES_DIR}\" +Please clone the external repository containing the code in this direct- +ory, build the executable, and then rerun the workflow." +fi +# +#----------------------------------------------------------------------- +# +# Set the names of the various tasks in the rocoto workflow XML. +# +#----------------------------------------------------------------------- +# +MAKE_GRID_TN="make_grid" +MAKE_OROG_TN="make_orog" +MAKE_SFC_CLIMO_TN="make_sfc_climo" +GET_EXTRN_ICS_TN="get_extrn_ics" +GET_EXTRN_LBCS_TN="get_extrn_lbcs" +MAKE_ICS_TN="make_ics" +MAKE_LBCS_TN="make_lbcs" +RUN_FCST_TN="run_fcst" +RUN_POST_TN="run_post" +# +#----------------------------------------------------------------------- +# # The forecast length (in integer hours) cannot contain more than 3 cha- # racters. Thus, its maximum value is 999. Check whether the specified # forecast length exceeds this maximum value. 
If so, print out a warn- @@ -676,7 +790,7 @@ esac # FCST_LEN_HRS_MAX="999" if [ "$FCST_LEN_HRS" -gt "$FCST_LEN_HRS_MAX" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Forecast length is greater than maximum allowed length: FCST_LEN_HRS = $FCST_LEN_HRS FCST_LEN_HRS_MAX = $FCST_LEN_HRS_MAX" @@ -694,7 +808,7 @@ fi rem=$(( ${FCST_LEN_HRS}%${LBC_UPDATE_INTVL_HRS} )) if [ "$rem" -ne "0" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The forecast length (FCST_LEN_HRS) is not evenly divisible by the later- al boundary conditions update interval (LBC_UPDATE_INTVL_HRS): FCST_LEN_HRS = $FCST_LEN_HRS @@ -727,51 +841,17 @@ fi # #----------------------------------------------------------------------- # -# For a "GFDLgrid" type of grid, make sure RES is set to a valid value. -# Then set the C-resolution (CRES). +# For a "GFDLgrid" type of grid, make sure GFDLgrid_RES is set to a va- +# lid value. # #----------------------------------------------------------------------- # if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - - iselementof "$RES" valid_vals_RES || { \ - valid_vals_RES_str=$(printf "\"%s\" " "${valid_vals_RES[@]}"); - print_err_msg_exit "${script_name}" "\ -Number of grid cells per tile (in each horizontal direction) specified in -RES is not supported: - RES = \"$RES\" -RES must be one of the following: - $valid_vals_RES_str -"; } - - CRES="C${RES}" - -fi -# -#----------------------------------------------------------------------- -# -# For a grid with GRID_GEN_METHOD set to "JPgrid", the orography filter- -# is performed by passing to the orography filtering the parameters for -# an "equivalent" global uniform cubed-sphere grid. These are the para- -# meters that a global uniform cubed-sphere grid needs to have in order -# to have a nominal grid cell size equal to that of the (average) cell -# size on the regional grid. 
These globally-equivalent parameters in- -# clude a resolution (in units of number of cells in each of the two ho- -# rizontal directions) and a stretch factor. The equivalent resolution -# is calculated in the script that generates the grid and orography, and -# the stretch factor needs to be set to 1 because we are considering an -# equivalent globally UNIFORM grid. However, it turns out that with a -# non-symmetric regional grid (one in which nx is not equal to ny), set- -# ting stretch_fac to 1 fails because the orography filtering program is -# designed for a global cubed-sphere grid and thus assumes that nx and -# ny for a given tile are equal when stretch_fac is exactly equal to 1. <-- Why is this? Seems like symmetry btwn x and y should still hold when stretch_fac is not equal to 1. -# It turns out that the program will work if we set stretch_fac that is -# not exactly 1. This is what we do below. -# -#----------------------------------------------------------------------- -# -if [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - stretch_fac="0.999" + err_msg="\ +The number of grid cells per tile in each horizontal direction specified +in GFDLgrid_RES is not supported: + GFDLgrid_RES = \"${GFDLgrid_RES}\"" + check_var_valid_value "GFDLgrid_RES" "valid_vals_GFDLgrid_RES" "${err_msg}" fi # #----------------------------------------------------------------------- @@ -796,10 +876,9 @@ mkdir_vrfy -p "${EXPT_BASEDIR}" #----------------------------------------------------------------------- # if [ -z "${EXPT_SUBDIR}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The name of the experiment subdirectory (EXPT_SUBDIR) cannot be empty: - EXPT_SUBDIR = \"${EXPT_SUBDIR}\" -" + EXPT_SUBDIR = \"${EXPT_SUBDIR}\"" fi # #----------------------------------------------------------------------- @@ -823,13 +902,203 @@ LOGDIR="${EXPTDIR}/log" #----------------------------------------------------------------------- # if [ "${RUN_ENVIR}" = "nco" ]; then + 
FIXam="${FIXrrfs}/fix_am" - FIXsar="${FIXrrfs}/fix_sar" + FIXsar="${FIXrrfs}/fix_sar/${EMC_GRID_NAME}" COMROOT="$PTMP/com" +# +# In NCO mode (i.e. if RUN_ENVIR set to "nco"), it is assumed that before +# running the experiment generation script, the path specified in FIXam +# already exists and is either itself the directory in which various fixed +# files (but not the ones containing the regional grid and the orography +# and surface climatology on that grid) are located, or it is a symlink +# to such a directory. Resolve any symlinks in the path specified by +# FIXam and check that this is the case. +# + path_resolved=$( readlink -m "$FIXam" ) + if [ ! -d "${path_resolved}" ]; then + print_err_msg_exit "\ +In NCO mode (RUN_ENVIR set to \"nco\"), the path specified by FIXam after +resolving all symlinks (path_resolved) must point to an existing directory +before an experiment can be generated. In this case, path_resolved is +not a directory or does not exist: + RUN_ENVIR = \"${RUN_ENVIR}\" + FIXam = \"$FIXam\" + path_resolved = \"${path_resolved}\" +Please correct and then rerun the experiment generation script." + fi +# +# In NCO mode (i.e. if RUN_ENVIR set to "nco"), it is assumed that before +# running the experiment generation script, the path specified in FIXsar +# already exists and is either itself the directory in which the fixed +# grid, orography, and surface climatology files are located, or it is a +# symlink to such a directory. Resolve any symlinks in the path specified +# by FIXsar and check that this is the case. +# + path_resolved=$( readlink -m "$FIXsar" ) + if [ ! -d "${path_resolved}" ]; then + print_err_msg_exit "\ +In NCO mode (RUN_ENVIR set to \"nco\"), the path specified by FIXsar after +resolving all symlinks (path_resolved) must point to an existing directory +before an experiment can be generated. 
In this case, path_resolved is +not a directory or does not exist: + RUN_ENVIR = \"${RUN_ENVIR}\" + FIXsar = \"$FIXsar\" + path_resolved = \"${path_resolved}\" +Please correct and then rerun the experiment generation script." + fi + else + FIXam="${EXPTDIR}/fix_am" FIXsar="${EXPTDIR}/fix_sar" COMROOT="" + +fi +# +#----------------------------------------------------------------------- +# +# The FV3 forecast model needs the following input files in the run di- +# rectory to start a forecast: +# +# (1) The data table file +# (2) The diagnostics table file +# (3) The field table file +# (4) The FV3 namelist file +# (5) The model configuration file +# (6) The NEMS configuration file +# +# If using CCPP, it also needs: +# +# (7) The CCPP physics suite definition file +# +# The workflow contains templates for the first six of these files. +# Template files are versions of these files that contain placeholder +# (i.e. dummy) values for various parameters. The experiment/workflow +# generation scripts copy these templates to appropriate locations in +# the experiment directory (either the top of the experiment directory +# or one of the cycle subdirectories) and replace the placeholders in +# these copies by actual values specified in the experiment/workflow +# configuration file (or derived from such values). The scripts then +# use the resulting "actual" files as inputs to the forecast model. +# +# Note that the CCPP physics suite defintion file does not have a cor- +# responding template file because it does not contain any values that +# need to be replaced according to the experiment/workflow configura- +# tion. If using CCPP, this file simply needs to be copied over from +# its location in the forecast model's directory structure to the ex- +# periment directory. +# +# Below, we first set the names of the templates for the first six files +# listed above. We then set the full paths to these template files. 
+# Note that some of these file names depend on the physics suite while +# others do not. +# +#----------------------------------------------------------------------- +# +dot_ccpp_phys_suite_or_null="" +if [ "${USE_CCPP}" = "TRUE" ]; then + dot_ccpp_phys_suite_or_null=".${CCPP_PHYS_SUITE}" +fi + +DATA_TABLE_TMPL_FN="${DATA_TABLE_FN}" +DIAG_TABLE_TMPL_FN="${DIAG_TABLE_FN}${dot_ccpp_phys_suite_or_null}" +FIELD_TABLE_TMPL_FN="${FIELD_TABLE_FN}${dot_ccpp_phys_suite_or_null}" +FV3_NML_TMPL_FN="${FV3_NML_FN}${dot_ccpp_phys_suite_or_null}" +MODEL_CONFIG_TMPL_FN="${MODEL_CONFIG_FN}${dot_ccpp_phys_suite_or_null}" +NEMS_CONFIG_TMPL_FN="${NEMS_CONFIG_FN}" + +DATA_TABLE_TMPL_FP="${TEMPLATE_DIR}/${DATA_TABLE_TMPL_FN}" +DIAG_TABLE_TMPL_FP="${TEMPLATE_DIR}/${DIAG_TABLE_TMPL_FN}" +FIELD_TABLE_TMPL_FP="${TEMPLATE_DIR}/${FIELD_TABLE_TMPL_FN}" +FV3_NML_TMPL_FP="${TEMPLATE_DIR}/${FV3_NML_TMPL_FN}" +MODEL_CONFIG_TMPL_FP="${TEMPLATE_DIR}/${MODEL_CONFIG_TMPL_FN}" +NEMS_CONFIG_TMPL_FP="${TEMPLATE_DIR}/${NEMS_CONFIG_TMPL_FN}" +# +#----------------------------------------------------------------------- +# +# If using CCPP, set: +# +# 1) the variable CCPP_PHYS_SUITE_FN to the name of the CCPP physics +# suite definition file. +# 2) the variable CCPP_PHYS_SUITE_IN_CCPP_FP to the full path of this +# file in the forecast model's directory structure. +# 3) the variable CCPP_PHYS_SUITE_FP to the full path of this file in +# the experiment directory. +# +# Note that the experiment/workflow generation scripts will copy this +# file from CCPP_PHYS_SUITE_IN_CCPP_FP to CCPP_PHYS_SUITE_FP. Then, for +# each cycle, the forecast launch script will create a link in the cycle +# run directory to the copy of this file at CCPP_PHYS_SUITE_FP. +# +# Note that if not using CCPP, the variables described above will get +# set to null strings. 
+# +#----------------------------------------------------------------------- +# +CCPP_PHYS_SUITE_FN="" +CCPP_PHYS_SUITE_IN_CCPP_FP="" +CCPP_PHYS_SUITE_FP="" + +if [ "${USE_CCPP}" = "TRUE" ]; then + CCPP_PHYS_SUITE_FN="suite_${CCPP_PHYS_SUITE}.xml" + CCPP_PHYS_SUITE_IN_CCPP_FP="${UFS_WTHR_MDL_DIR}/FV3/ccpp/suites/${CCPP_PHYS_SUITE_FN}" + CCPP_PHYS_SUITE_FP="${EXPTDIR}/${CCPP_PHYS_SUITE_FN}" +fi +# +#----------------------------------------------------------------------- +# +# Set the full paths to those forecast model input files that are cycle- +# independent, i.e. they don't include information about the cycle's +# starting day/time. These are: +# +# * The data table file [(1) in the list above)] +# * The field table file [(3) in the list above)] +# * The FV3 namelist file [(4) in the list above)] +# * The NEMS configuration file [(6) in the list above)] +# +# Since they are cycle-independent, the experiment/workflow generation +# scripts will place them in the main experiment directory (EXPTDIR). +# The script that runs each cycle will then create links to these files +# in the run directories of the individual cycles (which are subdirecto- +# ries under EXPTDIR). +# +# The remaining two input files to the forecast model, i.e. +# +# * The diagnostics table file [(2) in the list above)] +# * The model configuration file [(5) in the list above)] +# +# contain parameters that depend on the cycle start date. Thus, custom +# versions of these two files must be generated for each cycle and then +# placed directly in the run directories of the cycles (not EXPTDIR). +# For this reason, the full paths to their locations vary by cycle and +# cannot be set here (i.e. they can only be set in the loop over the +# cycles in the rocoto workflow XML file). 
+# +#----------------------------------------------------------------------- +# +DATA_TABLE_FP="${EXPTDIR}/${DATA_TABLE_FN}" +FIELD_TABLE_FP="${EXPTDIR}/${FIELD_TABLE_FN}" +FV3_NML_FP="${EXPTDIR}/${FV3_NML_FN}" +NEMS_CONFIG_FP="${EXPTDIR}/${NEMS_CONFIG_FN}" +# +#----------------------------------------------------------------------- +# +# Set the full path to the script that can be used to (re)launch the +# workflow. Also, if USE_CRON_TO_RELAUNCH is set to TRUE, set the line +# to add to the cron table to automatically relaunch the workflow every +# CRON_RELAUNCH_INTVL_MNTS minutes. Otherwise, set the variable con- +# taining this line to a null string. +# +#----------------------------------------------------------------------- +# +WFLOW_LAUNCH_SCRIPT_FP="$USHDIR/${WFLOW_LAUNCH_SCRIPT_FN}" +WFLOW_LAUNCH_LOG_FP="$EXPTDIR/${WFLOW_LAUNCH_LOG_FN}" +if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then + CRONTAB_LINE="*/${CRON_RELAUNCH_INTVL_MNTS} * * * * cd $EXPTDIR && \ +./${WFLOW_LAUNCH_SCRIPT_FN} >> ./${WFLOW_LAUNCH_LOG_FN} 2>&1" +else + CRONTAB_LINE="" fi # #----------------------------------------------------------------------- @@ -861,7 +1130,7 @@ if [ "${RUN_ENVIR}" = "nco" ]; then [ "${RUN_TASK_MAKE_GRID}" = "FALSE" -a \ "${GRID_DIR}" != "$FIXsar" ]; then - msg="\ + msg=" When RUN_ENVIR is set to \"nco\", it is assumed that grid files already exist in the directory specified by FIXsar. Thus, the grid file genera- tion task must not be run (i.e. RUN_TASK_MAKE_GRID must be set to @@ -879,7 +1148,6 @@ of FIXsar. Reset values are:" msg="$msg"" RUN_TASK_MAKE_GRID = \"${RUN_TASK_MAKE_GRID}\" GRID_DIR = \"${GRID_DIR}\" - " print_info_msg "$msg" @@ -890,7 +1158,7 @@ of FIXsar. Reset values are:" [ "${RUN_TASK_MAKE_OROG}" = "FALSE" -a \ "${OROG_DIR}" != "$FIXsar" ]; then - msg="\ + msg=" When RUN_ENVIR is set to \"nco\", it is assumed that orography files al- ready exist in the directory specified by FIXsar. Thus, the orography file generation task must not be run (i.e. 
RUN_TASK_MAKE_OROG must be @@ -908,7 +1176,6 @@ of FIXsar. Reset values are:" msg="$msg"" RUN_TASK_MAKE_OROG = \"${RUN_TASK_MAKE_OROG}\" OROG_DIR = \"${OROG_DIR}\" - " print_info_msg "$msg" @@ -919,7 +1186,7 @@ of FIXsar. Reset values are:" [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" -a \ "${SFC_CLIMO_DIR}" != "$FIXsar" ]; then - msg="\ + msg=" When RUN_ENVIR is set to \"nco\", it is assumed that surface climatology files already exist in the directory specified by FIXsar. Thus, the surface climatology file generation task must not be run (i.e. RUN_- @@ -936,8 +1203,7 @@ contents of FIXsar. Reset values are:" msg="$msg"" RUN_TASK_MAKE_SFC_CLIMO = \"${RUN_TASK_MAKE_SFC_CLIMO}\" - SFC_CLIMO_DIR = \"${SFC_CLIMO_DIR}\"\n - + SFC_CLIMO_DIR = \"${SFC_CLIMO_DIR}\" " print_info_msg "$msg" @@ -956,15 +1222,12 @@ else #----------------------------------------------------------------------- # if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then - if [ ! -d "${GRID_DIR}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The directory (GRID_DIR) that should contain the pre-generated grid files does not exist: - GRID_DIR = \"${GRID_DIR}\" -" + GRID_DIR = \"${GRID_DIR}\"" fi - else GRID_DIR="$EXPTDIR/grid" fi @@ -979,15 +1242,12 @@ files does not exist: #----------------------------------------------------------------------- # if [ "${RUN_TASK_MAKE_OROG}" = "FALSE" ]; then - if [ ! -d "${OROG_DIR}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The directory (OROG_DIR) that should contain the pre-generated orography files does not exist: - OROG_DIR = \"${OROG_DIR}\" -" + OROG_DIR = \"${OROG_DIR}\"" fi - else OROG_DIR="$EXPTDIR/orog" fi @@ -1004,11 +1264,10 @@ files does not exist: if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" ]; then if [ ! 
-d "${SFC_CLIMO_DIR}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The directory (SFC_CLIMO_DIR) that should contain the pre-generated orography files does not exist: - SFC_CLIMO_DIR = \"${SFC_CLIMO_DIR}\" -" + SFC_CLIMO_DIR = \"${SFC_CLIMO_DIR}\"" fi else @@ -1023,15 +1282,12 @@ fi # #----------------------------------------------------------------------- # -iselementof "$EXTRN_MDL_NAME_ICS" valid_vals_EXTRN_MDL_NAME_ICS || { \ -valid_vals_EXTRN_MDL_NAME_ICS_str=$(printf "\"%s\" " "${valid_vals_EXTRN_MDL_NAME_ICS[@]}"); -print_err_msg_exit "${script_name}" "\ +err_msg="\ The external model specified in EXTRN_MDL_NAME_ICS that provides initial conditions (ICs) and surface fields to the FV3SAR is not supported: - EXTRN_MDL_NAME_ICS = \"$EXTRN_MDL_NAME_ICS\" -EXTRN_MDL_NAME_ICS must be one of the following: - $valid_vals_EXTRN_MDL_NAME_ICS_str -"; } + EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\"" +check_var_valid_value \ + "EXTRN_MDL_NAME_ICS" "valid_vals_EXTRN_MDL_NAME_ICS" "${err_msg}" # #----------------------------------------------------------------------- # @@ -1039,30 +1295,42 @@ EXTRN_MDL_NAME_ICS must be one of the following: # #----------------------------------------------------------------------- # -iselementof "$EXTRN_MDL_NAME_LBCS" valid_vals_EXTRN_MDL_NAME_LBCS || { \ -valid_vals_EXTRN_MDL_NAME_LBCS_str=$(printf "\"%s\" " "${valid_vals_EXTRN_MDL_NAME_LBCS[@]}"); -print_err_msg_exit "${script_name}" "\ -The external model specified in EXTRN_MDL_NAME_LBCS that provides later- -al boundary conditions (LBCs) to the FV3SAR is not supported: - EXTRN_MDL_NAME_LBCS = \"$EXTRN_MDL_NAME_LBCS\" -EXTRN_MDL_NAME_LBCS must be one of the following: - $valid_vals_EXTRN_MDL_NAME_LBCS_str -"; } +err_msg="\ +The external model specified in EXTRN_MDL_NAME_LBCS that provides lateral +boundary conditions (LBCs) to the FV3SAR is not supported: + EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" +check_var_valid_value \ + "EXTRN_MDL_NAME_LBCS" 
"valid_vals_EXTRN_MDL_NAME_LBCS" "${err_msg}" +# +#----------------------------------------------------------------------- +# +# Make sure FV3GFS_FILE_FMT_ICS is set to a valid value. +# +#----------------------------------------------------------------------- +# +if [ "${EXTRN_MDL_NAME_ICS}" = "FV3GFS" ]; then + err_msg="\ +The file format for FV3GFS external model files specified in FV3GFS_- +FILE_FMT_ICS is not supported: + FV3GFS_FILE_FMT_ICS = \"${FV3GFS_FILE_FMT_ICS}\"" + check_var_valid_value \ + "FV3GFS_FILE_FMT_ICS" "valid_vals_FV3GFS_FILE_FMT_ICS" "${err_msg}" +fi # #----------------------------------------------------------------------- # -# Make sure FV3GFS_DATA_TYPE is set to a valid value. +# Make sure FV3GFS_FILE_FMT_LBCS is set to a valid value. # #----------------------------------------------------------------------- # -iselementof "$FV3GFS_DATA_TYPE" valid_vals_FV3GFS_DATA_TYPE || { \ -valid_vals_FV3GFS_DATA_TYPE_str=$(printf "\"%s\" " "${valid_vals_FV3GFS_DATA_TYPE[@]}"); -print_err_msg_exit "${script_name}" "\ -The data type specified in FV3GFS_DATA_TYPE is not supported: - FV3GFS_DATA_TYPE = \"$FV3GFS_DATA_TYPE\" -FV3GFS_DATA_TYPE must be one of the following: - $valid_vals_FV3GFS_DATA_TYPE_str -"; } +if [ "${EXTRN_MDL_NAME_LBCS}" = "FV3GFS" ]; then + err_msg="\ +The file format for FV3GFS external model files specified in FV3GFS_- +FILE_FMT_LBCS is not supported: + FV3GFS_FILE_FMT_LBCS = \"${FV3GFS_FILE_FMT_LBCS}\"" + check_var_valid_value \ + "FV3GFS_FILE_FMT_LBCS" "valid_vals_FV3GFS_FILE_FMT_LBCS" "${err_msg}" +fi # #----------------------------------------------------------------------- # @@ -1075,22 +1343,20 @@ if [ "${RUN_ENVIR}" = "nco" ]; then if [ "${EXTRN_MDL_NAME_ICS}" != "FV3GFS" ] && \ [ "${EXTRN_MDL_NAME_ICS}" != "GSMGFS" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ When RUN_ENVIR set to \"nco\", the external model used for the initial conditions and surface fields must be either \"FV3GFS\" or 
\"GSMGFS\": RUN_ENVIR = \"${RUN_ENVIR}\" - EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" -" + EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\"" fi if [ "${EXTRN_MDL_NAME_LBCS}" != "FV3GFS" ] && \ [ "${EXTRN_MDL_NAME_LBCS}" != "GSMGFS" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ When RUN_ENVIR set to \"nco\", the external model used for the initial conditions and surface fields must be either \"FV3GFS\" or \"GSMGFS\": RUN_ENVIR = \"${RUN_ENVIR}\" - EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" -" + EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" fi fi @@ -1142,22 +1408,22 @@ fi # Note that the regional grid is referred to as "tile 7" in the code. # We will let: # -# * nh0_T7 denote the width (in units of number of cells on tile 7) of -# the 0-cell-wide halo, i.e. nh0_T7 = 0; +# * NH0 denote the width (in units of number of cells on tile 7) of +# the 0-cell-wide halo, i.e. NH0 = 0; # -# * nh3_T7 denote the width (in units of number of cells on tile 7) of -# the 3-cell-wide halo, i.e. nh3_T7 = 3; and +# * NH3 denote the width (in units of number of cells on tile 7) of +# the 3-cell-wide halo, i.e. NH3 = 3; and # -# * nh4_T7 denote the width (in units of number of cells on tile 7) of -# the 4-cell-wide halo, i.e. nh4_T7 = 4. +# * NH4 denote the width (in units of number of cells on tile 7) of +# the 4-cell-wide halo, i.e. NH4 = 4. # # We define these variables next. 
# #----------------------------------------------------------------------- # -nh0_T7=0 -nh3_T7=3 -nh4_T7=$(( $nh3_T7 + 1 )) +NH0=0 +NH3=3 +NH4=4 # #----------------------------------------------------------------------- # @@ -1165,14 +1431,12 @@ nh4_T7=$(( $nh3_T7 + 1 )) # #----------------------------------------------------------------------- # -iselementof "${GRID_GEN_METHOD}" valid_vals_GRID_GEN_METHOD || { \ -valid_vals_GRID_GEN_METHOD_str=$(printf "\"%s\" " "${valid_vals_GRID_GEN_METHOD[@]}"); -print_err_msg_exit "${script_name}" "\ -The grid generation method specified in GRID_GEN_METHOD is not supported: - GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" -GRID_GEN_METHOD must be one of the following: - $valid_vals_GRID_GEN_METHOD_str -"; } +err_msg="\ +The horizontal grid generation method specified in GRID_GEN_METHOD is +not supported: + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" +check_var_valid_value \ + "GRID_GEN_METHOD" "valid_vals_GRID_GEN_METHOD" "${err_msg}" # #----------------------------------------------------------------------- # @@ -1184,7 +1448,26 @@ GRID_GEN_METHOD must be one of the following: # if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - . 
$USHDIR/set_gridparams_GFDLgrid.sh + set_gridparams_GFDLgrid \ + lon_of_t6_ctr="${GFDLgrid_LON_T6_CTR}" \ + lat_of_t6_ctr="${GFDLgrid_LAT_T6_CTR}" \ + res_of_t6g="${GFDLgrid_RES}" \ + stretch_factor="${GFDLgrid_STRETCH_FAC}" \ + refine_ratio_t6g_to_t7g="${GFDLgrid_REFINE_RATIO}" \ + istart_of_t7_on_t6g="${GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G}" \ + iend_of_t7_on_t6g="${GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G}" \ + jstart_of_t7_on_t6g="${GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G}" \ + jend_of_t7_on_t6g="${GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G}" \ + output_varname_lon_of_t7_ctr="LON_CTR" \ + output_varname_lat_of_t7_ctr="LAT_CTR" \ + output_varname_nx_of_t7_on_t7g="NX" \ + output_varname_ny_of_t7_on_t7g="NY" \ + output_varname_halo_width_on_t7g="NHW" \ + output_varname_stretch_factor="STRETCH_FAC" \ + output_varname_istart_of_t7_with_halo_on_t6sg="ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG" \ + output_varname_iend_of_t7_with_halo_on_t6sg="IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG" \ + output_varname_jstart_of_t7_with_halo_on_t6sg="JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG" \ + output_varname_jend_of_t7_with_halo_on_t6sg="JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG" # #----------------------------------------------------------------------- # @@ -1194,9 +1477,193 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then # elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - . 
$USHDIR/set_gridparams_JPgrid.sh + set_gridparams_JPgrid \ + lon_ctr="${JPgrid_LON_CTR}" \ + lat_ctr="${JPgrid_LAT_CTR}" \ + nx="${JPgrid_NX}" \ + ny="${JPgrid_NY}" \ + halo_width="${JPgrid_WIDE_HALO_WIDTH}" \ + delx="${JPgrid_DELX}" \ + dely="${JPgrid_DELY}" \ + alpha="${JPgrid_ALPHA_PARAM}" \ + kappa="${JPgrid_KAPPA_PARAM}" \ + output_varname_lon_ctr="LON_CTR" \ + output_varname_lat_ctr="LAT_CTR" \ + output_varname_nx="NX" \ + output_varname_ny="NY" \ + output_varname_halo_width="NHW" \ + output_varname_stretch_factor="STRETCH_FAC" \ + output_varname_del_angle_x_sg="DEL_ANGLE_X_SG" \ + output_varname_del_angle_y_sg="DEL_ANGLE_Y_SG" \ + output_varname_neg_nx_of_dom_with_wide_halo="NEG_NX_OF_DOM_WITH_WIDE_HALO" \ + output_varname_neg_ny_of_dom_with_wide_halo="NEG_NY_OF_DOM_WITH_WIDE_HALO" + +fi +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# +RES_IN_FIXSAR_FILENAMES="" + +if [ "${RUN_ENVIR}" != "nco" ]; then + mkdir_vrfy -p "$FIXsar" +fi +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# +# Is this if-statement still necessary? 
+if [ "${RUN_ENVIR}" = "nco" ]; then + + glob_pattern="C*_mosaic.nc" + cd_vrfy $FIXsar + num_files=$( ls -1 ${glob_pattern} 2>/dev/null | wc -l ) + + if [ "${num_files}" -ne "1" ]; then + print_err_msg_exit "\ +Exactly one file must exist in directory FIXsar matching the globbing +pattern glob_pattern: + FIXsar = \"${FIXsar}\" + glob_pattern = \"${glob_pattern}\" + num_files = ${num_files}" + fi + + fn=$( ls -1 ${glob_pattern} ) + RES_IN_FIXSAR_FILENAMES=$( printf "%s" $fn | sed -n -r -e "s/^C([0-9]*)_mosaic.nc/\1/p" ) +echo "RES_IN_FIXSAR_FILENAMES = ${RES_IN_FIXSAR_FILENAMES}" + + if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ] && \ + [ "${GFDLgrid_RES}" -ne "${RES_IN_FIXSAR_FILENAMES}" ]; then + print_err_msg_exit "\ +The resolution extracted from the fixed file names (RES_IN_FIXSAR_FILENAMES) +does not match the resolution specified by GFDLgrid_RES: + GFDLgrid_RES = ${GFDLgrid_RES} + RES_IN_FIXSAR_FILENAMES = ${RES_IN_FIXSAR_FILENAMES}" + fi + +# RES_equiv=$( ncdump -h "${grid_fn}" | grep -o ":RES_equiv = [0-9]\+" | grep -o "[0-9]") +# RES_equiv=${RES_equiv//$'\n'/} +#printf "%s\n" "RES_equiv = $RES_equiv" +# CRES_equiv="C${RES_equiv}" +#printf "%s\n" "CRES_equiv = $CRES_equiv" +# +# RES="$RES_equiv" +# CRES="$CRES_equiv" + +else +# +#----------------------------------------------------------------------- +# +# If the grid file generation task in the workflow is going to be +# skipped (because pregenerated files are available), create links in +# the FIXsar directory to the pregenerated grid files. +# +#----------------------------------------------------------------------- +# + res_in_grid_fns="" + if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then + + link_fix \ + verbose="$VERBOSE" \ + file_group="grid" \ + output_varname_res_in_filenames="res_in_grid_fns" || \ + print_err_msg_exit "\ + Call to function to create links to grid files failed." 
+ + RES_IN_FIXSAR_FILENAMES="${res_in_grid_fns}" + + fi +# +#----------------------------------------------------------------------- +# +# If the orography file generation task in the workflow is going to be +# skipped (because pregenerated files are available), create links in +# the FIXsar directory to the pregenerated orography files. +# +#----------------------------------------------------------------------- +# + res_in_orog_fns="" + if [ "${RUN_TASK_MAKE_OROG}" = "FALSE" ]; then + + link_fix \ + verbose="$VERBOSE" \ + file_group="orog" \ + output_varname_res_in_filenames="res_in_orog_fns" || \ + print_err_msg_exit "\ + Call to function to create links to orography files failed." + + if [ ! -z "${RES_IN_FIXSAR_FILENAMES}" ] && \ + [ "${res_in_orog_fns}" -ne "${RES_IN_FIXSAR_FILENAMES}" ]; then + print_err_msg_exit "\ + The resolution extracted from the orography file names (res_in_orog_fns) + does not match the resolution in other groups of files already consi- + dered (RES_IN_FIXSAR_FILENAMES): + res_in_orog_fns = ${res_in_orog_fns} + RES_IN_FIXSAR_FILENAMES = ${RES_IN_FIXSAR_FILENAMES}" + else + RES_IN_FIXSAR_FILENAMES="${res_in_orog_fns}" + fi + + fi +# +#----------------------------------------------------------------------- +# +# If the surface climatology file generation task in the workflow is +# going to be skipped (because pregenerated files are available), create +# links in the FIXsar directory to the pregenerated surface climatology +# files. +# +#----------------------------------------------------------------------- +# + res_in_sfc_climo_fns="" + if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" ]; then + + link_fix \ + verbose="$VERBOSE" \ + file_group="sfc_climo" \ + output_varname_res_in_filenames="res_in_sfc_climo_fns" || \ + print_err_msg_exit "\ + Call to function to create links to surface climatology files failed." + + if [ ! 
-z "${RES_IN_FIXSAR_FILENAMES}" ] && \ + [ "${res_in_sfc_climo_fns}" -ne "${RES_IN_FIXSAR_FILENAMES}" ]; then + print_err_msg_exit "\ + The resolution extracted from the surface climatology file names (res_- + in_sfc_climo_fns) does not match the resolution in other groups of files + already considered (RES_IN_FIXSAR_FILENAMES): + res_in_sfc_climo_fns = ${res_in_sfc_climo_fns} + RES_IN_FIXSAR_FILENAMES = ${RES_IN_FIXSAR_FILENAMES}" + else + RES_IN_FIXSAR_FILENAMES="${res_in_sfc_climo_fns}" + fi + fi + +fi +# +#----------------------------------------------------------------------- +# +# The variable CRES is needed in constructing various file names. If +# not running the make_grid task, we can set it here. Otherwise, it +# will get set to a valid value by that task. +# +#----------------------------------------------------------------------- +# +CRES="" +if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then + CRES="C${RES_IN_FIXSAR_FILENAMES}" fi + + + + + # #----------------------------------------------------------------------- # @@ -1204,14 +1671,7 @@ fi # #----------------------------------------------------------------------- # -iselementof "$QUILTING" valid_vals_QUILTING || { \ -valid_vals_QUILTING_str=$(printf "\"%s\" " "${valid_vals_QUILTING[@]}"); -print_err_msg_exit "${script_name}" "\ -Value specified in QUILTING is not supported: - QUILTING = \"$QUILTING\" -QUILTING must be set to one of the following: - $valid_vals_QUILTING_str -"; } +check_var_valid_value "QUILTING" "valid_vals_QUILTING" # # Set QUILTING to either "TRUE" or "FALSE" so we don't have to consider # other valid values later on. @@ -1227,106 +1687,135 @@ fi # #----------------------------------------------------------------------- # +# Make sure that PRINT_ESMF is set to a valid value. 
+# +#----------------------------------------------------------------------- +# +check_var_valid_value "PRINT_ESMF" "valid_vals_PRINT_ESMF" +# +# Set PRINT_ESMF to either "TRUE" or "FALSE" so we don't have to consider +# other valid values later on. +# +PRINT_ESMF=${PRINT_ESMF^^} +if [ "${PRINT_ESMF}" = "TRUE" ] || \ + [ "${PRINT_ESMF}" = "YES" ]; then + PRINT_ESMF="TRUE" +elif [ "${PRINT_ESMF}" = "FALSE" ] || \ + [ "${PRINT_ESMF}" = "NO" ]; then + PRINT_ESMF="FALSE" +fi +# +#----------------------------------------------------------------------- +# # Calculate PE_MEMBER01. This is the number of MPI tasks used for the # forecast, including those for the write component if QUILTING is set # to "TRUE". # #----------------------------------------------------------------------- # -PE_MEMBER01=$(( $layout_x*$layout_y )) +PE_MEMBER01=$(( LAYOUT_X*LAYOUT_Y )) if [ "$QUILTING" = "TRUE" ]; then - PE_MEMBER01=$(( $PE_MEMBER01 + ${WRTCMP_write_groups}*${WRTCMP_write_tasks_per_group} )) + PE_MEMBER01=$(( ${PE_MEMBER01} + ${WRTCMP_write_groups}*${WRTCMP_write_tasks_per_group} )) fi -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" " The number of MPI tasks for the forecast (including those for the write component if it is being used) are: - PE_MEMBER01 = $PE_MEMBER01" + PE_MEMBER01 = ${PE_MEMBER01}" # #----------------------------------------------------------------------- # # Make sure that the number of cells in the x and y direction are divi- -# sible by the MPI task dimensions layout_x and layout_y, respectively. +# sible by the MPI task dimensions LAYOUT_X and LAYOUT_Y, respectively. 
# #----------------------------------------------------------------------- # -rem=$(( $nx_T7%$layout_x )) +rem=$(( NX%LAYOUT_X )) if [ $rem -ne 0 ]; then - print_err_msg_exit "${script_name}" "\ -The number of grid cells in the x direction (nx_T7) is not evenly divisible -by the number of MPI tasks in the x direction (layout_x): - nx_T7 = $nx_T7 - layout_x = $layout_x" + print_err_msg_exit "\ +The number of grid cells in the x direction (NX) is not evenly divisible +by the number of MPI tasks in the x direction (LAYOUT_X): + NX = $NX + LAYOUT_X = ${LAYOUT_X}" fi -rem=$(( $ny_T7%$layout_y )) +rem=$(( NY%LAYOUT_Y )) if [ $rem -ne 0 ]; then - print_err_msg_exit "${script_name}" "\ -The number of grid cells in the y direction (ny_T7) is not evenly divisible -by the number of MPI tasks in the y direction (layout_y): - ny_T7 = $ny_T7 - layout_y = $layout_y" + print_err_msg_exit "\ +The number of grid cells in the y direction (NY) is not evenly divisible +by the number of MPI tasks in the y direction (LAYOUT_Y): + NY = $NY + LAYOUT_Y = ${LAYOUT_Y}" fi -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" " The MPI task layout is: - layout_x = $layout_x - layout_y = $layout_y" + LAYOUT_X = ${LAYOUT_X} + LAYOUT_Y = ${LAYOUT_Y}" # #----------------------------------------------------------------------- # # Make sure that, for a given MPI task, the number columns (which is -# equal to the number of horizontal cells) is divisible by the blocksize. +# equal to the number of horizontal cells) is divisible by BLOCKSIZE. 
# #----------------------------------------------------------------------- # -nx_per_task=$(( $nx_T7/$layout_x )) -ny_per_task=$(( $ny_T7/$layout_y )) +nx_per_task=$(( NX/LAYOUT_X )) +ny_per_task=$(( NY/LAYOUT_Y )) num_cols_per_task=$(( $nx_per_task*$ny_per_task )) -rem=$(( $num_cols_per_task%$blocksize )) +rem=$(( num_cols_per_task%BLOCKSIZE )) if [ $rem -ne 0 ]; then prime_factors_num_cols_per_task=$( factor $num_cols_per_task | sed -r -e 's/^[0-9]+: (.*)/\1/' ) - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The number of columns assigned to a given MPI task must be divisible by -the blocksize: - nx_per_task = nx_T7/layout_x = $nx_T7/$layout_x = $nx_per_task - ny_per_task = ny_T7/layout_y = $ny_T7/$layout_y = $ny_per_task - num_cols_per_task = nx_per_task*ny_per_task = $num_cols_per_task - blocksize = $blocksize - rem = num_cols_per_task%%blocksize = $rem -The prime factors of num_cols_per_task are (useful for determining a valid -blocksize): - prime_factors_num_cols_per_task: $prime_factors_num_cols_per_task" +BLOCKSIZE: + nx_per_task = NX/LAYOUT_X = $NX/${LAYOUT_X} = ${nx_per_task} + ny_per_task = NY/LAYOUT_Y = $NY/${LAYOUT_Y} = ${ny_per_task} + num_cols_per_task = nx_per_task*ny_per_task = ${num_cols_per_task} + BLOCKSIZE = $BLOCKSIZE + rem = num_cols_per_task%%BLOCKSIZE = $rem +The prime factors of num_cols_per_task are (useful for determining a va- +lid BLOCKSIZE): + prime_factors_num_cols_per_task: ${prime_factors_num_cols_per_task}" fi # #----------------------------------------------------------------------- # -# If the write component is going to be used to write output files (i.e. -# if QUILTING is set to "TRUE"), first make sure that a name is speci- -# fied for the template file containing the write-component output grid -# parameters. (This template file will be concatenated to the NEMS con- -# figuration file specified in MODEL_CONFIG_FN.) If so, set the full -# path to the file and make sure that the file exists. 
+# Initialize the full path to the template file containing placeholder +# values for the write component parameters. Then, if the write component +# is going to be used to write output files to disk (i.e. if QUILTING is +# set to "TRUE"), set the full path to this file. This file will be +# appended to the NEMS configuration file (MODEL_CONFIG_FN), and placeholder +# values will be replaced with actual ones. # #----------------------------------------------------------------------- # -if [ "$QUILTING" = "TRUE" ]; then +WRTCMP_PARAMS_TMPL_FP="" - if [ -z "$WRTCMP_PARAMS_TEMPLATE_FN" ]; then - print_err_msg_exit "${script_name}" "\ -The write-component template file name (WRTCMP_PARAMS_TEMPLATE_FN) must -be set to a non-empty value when quilting (i.e. the write-component) is -enabled: - QUILTING = \"$QUILTING\" - WRTCMP_PARAMS_TEMPLATE_FN = \"$WRTCMP_PARAMS_TEMPLATE_FN\"" - fi - - WRTCMP_PARAMS_TEMPLATE_FP="$TEMPLATE_DIR/$WRTCMP_PARAMS_TEMPLATE_FN" - if [ ! -f "$WRTCMP_PARAMS_TEMPLATE_FP" ]; then - print_err_msg_exit "${script_name}" "\ +if [ "$QUILTING" = "TRUE" ]; then +# +# First, make sure that WRTCMP_output_grid is set to a valid value. +# + err_msg="\ +The coordinate system used by the write-component output grid specified +in WRTCMP_output_grid is not supported: + WRTCMP_output_grid = \"${WRTCMP_output_grid}\"" + check_var_valid_value \ + "WRTCMP_output_grid" "valid_vals_WRTCMP_output_grid" "${err_msg}" +# +# Now set the name of the write-component template file. +# + wrtcmp_params_tmpl_fn=${wrtcmp_params_tmpl_fn:-"wrtcmp_${WRTCMP_output_grid}"} +# +# Finally, set the full path to the write component template file and +# make sure that the file exists. +# + WRTCMP_PARAMS_TMPL_FP="${TEMPLATE_DIR}/${wrtcmp_params_tmpl_fn}" + if [ ! 
-f "${WRTCMP_PARAMS_TMPL_FP}" ]; then + print_err_msg_exit "\ The write-component template file does not exist or is not a file: - WRTCMP_PARAMS_TEMPLATE_FP = \"$WRTCMP_PARAMS_TEMPLATE_FP\"" + WRTCMP_PARAMS_TMPL_FP = \"${WRTCMP_PARAMS_TMPL_FP}\"" fi fi @@ -1334,28 +1823,28 @@ fi #----------------------------------------------------------------------- # # If the write component is going to be used, make sure that the number -# of grid cells in the y direction (ny_T7) is divisible by the number of -# write tasks per group. This is because the ny_T7 rows of the grid -# must be distributed evenly among the write_tasks_per_group tasks in a -# given write group, i.e. each task must receive the same number of -# rows. This implies that ny_T7 must be evenly divisible by write_- -# tasks_per_group. If it isn't, the write component will hang or fail. -# We check for this below. +# of grid cells in the y direction (NY) is divisible by the number of +# write tasks per group. This is because the NY rows of the grid must +# be distributed evenly among the write_tasks_per_group tasks in a given +# write group, i.e. each task must receive the same number of rows. +# This implies that NY must be evenly divisible by WRTCMP_write_tasks_- +# per_group. If it isn't, the write component will hang or fail. We +# check for this below. 
# #----------------------------------------------------------------------- # if [ "$QUILTING" = "TRUE" ]; then - rem=$(( $ny_T7%${WRTCMP_write_tasks_per_group} )) + rem=$(( NY%WRTCMP_write_tasks_per_group )) if [ $rem -ne 0 ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The number of grid points in the y direction on the regional grid (ny_- T7) must be evenly divisible by the number of tasks per write group (WRTCMP_write_tasks_per_group): - ny_T7 = $ny_T7 + NY = $NY WRTCMP_write_tasks_per_group = $WRTCMP_write_tasks_per_group - ny_T7%%write_tasks_per_group = $rem" + NY%%write_tasks_per_group = $rem" fi fi @@ -1364,9 +1853,9 @@ fi # # Calculate the number of nodes (NUM_NODES) to request from the job # scheduler. This is just PE_MEMBER01 dividied by the number of cores -# per node (ncores_per_node) rounded up to the nearest integer, i.e. +# per node (NCORES_PER_NODE) rounded up to the nearest integer, i.e. # -# NUM_NODES = ceil(PE_MEMBER01/ncores_per_node) +# NUM_NODES = ceil(PE_MEMBER01/NCORES_PER_NODE) # # where ceil(...) is the ceiling function, i.e. it rounds its floating # point argument up to the next larger integer. Since in bash division @@ -1375,11 +1864,11 @@ fi # adding the denominator (of the argument of ceil(...) above) minus 1 to # the original numerator, i.e. 
by redefining NUM_NODES to be # -# NUM_NODES = (PE_MEMBER01 + ncores_per_node - 1)/ncores_per_node +# NUM_NODES = (PE_MEMBER01 + NCORES_PER_NODE - 1)/NCORES_PER_NODE # #----------------------------------------------------------------------- # -NUM_NODES=$(( ($PE_MEMBER01 + $ncores_per_node - 1)/$ncores_per_node )) +NUM_NODES=$(( (PE_MEMBER01 + NCORES_PER_NODE - 1)/NCORES_PER_NODE )) # #----------------------------------------------------------------------- # @@ -1392,19 +1881,18 @@ NUM_NODES=$(( ($PE_MEMBER01 + $ncores_per_node - 1)/$ncores_per_node )) # #----------------------------------------------------------------------- # -num_fixam_files_sysdir="${#FIXam_FILES_SYSDIR[@]}" -num_fixam_files_exptdir="${#FIXam_FILES_EXPTDIR[@]}" -if [ "${num_fixam_files_sysdir}" -ne "${num_fixam_files_exptdir}" ]; then - print_err_msg_exit "${script_name}" "\ -The number of fixed files specified in FIXam_FILES_SYSDIR must be equal -to that specified in FIXam_FILES_EXPTDIR: - num_fixam_files_sysdir = ${num_fixam_files_sysdir} - num_fixam_files_exptdir = ${num_fixam_files_exptdir} -" +num_fixgsm_files="${#FIXgsm_FILENAMES[@]}" +num_fixam_files="${#FIXam_FILENAMES[@]}" +if [ "${num_fixgsm_files}" -ne "${num_fixam_files}" ]; then + print_err_msg_exit "\ +The number of fixed files specified in the array FIXgsm_FILENAMES +(num_fixgsm_files) must be equal to that specified in the array FIXam_FILENAMES +(num_fixam_files): + num_fixgsm_files = ${num_fixgsm_files} + num_fixam_files = ${num_fixam_files}" else - NUM_FIXam_FILES="${num_fixam_files_sysdir}" + NUM_FIXam_FILES="${num_fixam_files}" fi - # #----------------------------------------------------------------------- # @@ -1414,29 +1902,25 @@ fi #----------------------------------------------------------------------- # mkdir_vrfy -p "$EXPTDIR" - -# Maybe do the following later? Not sure yet... 
-if [ "${RUN_ENVIR}" != "nco" ]; then - mkdir_vrfy -p $FIXsar -fi # #----------------------------------------------------------------------- # -# Generate the shell script that will appear in the run directory (RUN- -# DIR) and will contain definitions of variables needed by the various -# scripts in the workflow. We refer to this as the variable definitions -# file. We will create this file by: +# Generate the shell script that will appear in the experiment directory +# (EXPTDIR) and will contain definitions of variables needed by the va- +# rious scripts in the workflow. We refer to this as the experiment/ +# workflow global variable definitions file. We will create this file +# by: # -# 1) Copying the default workflow/experiment configuration script (spe- -# fied by DEFAULT_CONFIG_FN and located in the shell script directory -# USHDIR) to the run directory and renaming it to the name specified -# by SCRIPT_VAR_DEFNS_FN. +# 1) Copying the default workflow/experiment configuration file (speci- +# fied by DEFAULT_EXPT_CONFIG_FN and located in the shell script di- +# rectory specified by USHDIR) to the experiment directory and rena- +# ming it to the name specified by GLOBAL_VAR_DEFNS_FN. # -# 2) Resetting the original values of the variables defined in this file -# to their current values. This is necessary because these variables -# may have been reset by the local configuration script (if one ex- -# ists in USHDIR) and/or by this setup script, e.g. because predef_- -# domain is set to a valid non-empty value. +# 2) Resetting the default variable values in this file to their current +# values. This is necessary because these variables may have been +# reset by the user-specified configuration file (if one exists in +# USHDIR) and/or by this setup script, e.g. because predef_domain is +# set to a valid non-empty value. 
# # 3) Appending to the variable definitions file any new variables intro- # duced in this setup script that may be needed by the scripts that @@ -1448,8 +1932,8 @@ fi # #----------------------------------------------------------------------- # -SCRIPT_VAR_DEFNS_FP="$EXPTDIR/$SCRIPT_VAR_DEFNS_FN" -cp_vrfy ./${DEFAULT_CONFIG_FN} ${SCRIPT_VAR_DEFNS_FP} +GLOBAL_VAR_DEFNS_FP="$EXPTDIR/$GLOBAL_VAR_DEFNS_FN" +cp_vrfy $USHDIR/${DEFAULT_EXPT_CONFIG_FN} ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # @@ -1457,15 +1941,15 @@ cp_vrfy ./${DEFAULT_CONFIG_FN} ${SCRIPT_VAR_DEFNS_FP} #----------------------------------------------------------------------- # -# Read all lines of SCRIPT_VAR_DEFNS file into the variable line_list. -line_list=$( sed -r -e "s/(.*)/\1/g" ${SCRIPT_VAR_DEFNS_FP} ) +# Read all lines of GLOBAL_VAR_DEFNS file into the variable line_list. +line_list=$( sed -r -e "s/(.*)/\1/g" ${GLOBAL_VAR_DEFNS_FP} ) # # Loop through the lines in line_list and concatenate lines ending with # the line bash continuation character "\". 
# -rm_vrfy ${SCRIPT_VAR_DEFNS_FP} +rm_vrfy ${GLOBAL_VAR_DEFNS_FP} while read crnt_line; do - printf "%s\n" "${crnt_line}" >> ${SCRIPT_VAR_DEFNS_FP} + printf "%s\n" "${crnt_line}" >> ${GLOBAL_VAR_DEFNS_FP} done <<< "${line_list}" # #----------------------------------------------------------------------- @@ -1503,12 +1987,13 @@ line_list=$( sed -r \ -e "s/^([ ]*)([^ ]+.*)/\2/g" \ -e "/^#.*/d" \ -e "/^$/d" \ - ${SCRIPT_VAR_DEFNS_FP} ) -echo -echo "The variable \"line_list\" contains:" -echo -printf "%s\n" "${line_list}" -echo + ${GLOBAL_VAR_DEFNS_FP} ) + +print_info_msg "$VERBOSE" " +The variable \"line_list\" contains: + +${line_list} +" # #----------------------------------------------------------------------- # @@ -1539,11 +2024,11 @@ EOM # str_to_insert=${str_to_insert//$'\n'/\\n} # -# Insert str_to_insert into SCRIPT_VAR_DEFNS_FP right after the line +# Insert str_to_insert into GLOBAL_VAR_DEFNS_FP right after the line # containing the name of the interpreter. # REGEXP="(^#!.*)" -sed -i -r -e "s|$REGEXP|\1\n\n$str_to_insert\n|g" $SCRIPT_VAR_DEFNS_FP +sed -i -r -e "s|$REGEXP|\1\n\n$str_to_insert\n|g" ${GLOBAL_VAR_DEFNS_FP} @@ -1573,7 +2058,8 @@ while read crnt_line; do # if [ ! -z $var_name ]; then - printf "\n%s\n" "var_name = \"${var_name}\"" + print_info_msg "$VERBOSE" " +var_name = \"${var_name}\"" # # If the variable specified in var_name is set in the current environ- # ment (to either an empty or non-empty string), get its value and in- @@ -1649,11 +2135,12 @@ while read crnt_line; do # else - print_info_msg "\ + print_info_msg " The variable specified by \"var_name\" is not set in the current envi- ronment: var_name = \"${var_name}\" Setting its value in the variable definitions file to an empty string." + var_value="\"\"" fi @@ -1661,7 +2148,7 @@ Setting its value in the variable definitions file to an empty string." # Now place var_value on the right-hand side of the assignment statement # on the appropriate line in variable definitions file. 
# - set_file_param "${SCRIPT_VAR_DEFNS_FP}" "${var_name}" "${var_value}" + set_file_param "${GLOBAL_VAR_DEFNS_FP}" "${var_name}" "${var_value}" # # If var_name is empty, then a variable name was not found in the cur- # rent line in line_list. In this case, print out a warning and move on @@ -1669,8 +2156,7 @@ Setting its value in the variable definitions file to an empty string." # else - print_info_msg "\ - + print_info_msg " Could not extract a variable name from the current line in \"line_list\" (probably because it does not contain an equal sign with no spaces on either side): @@ -1691,7 +2177,7 @@ done <<< "${line_list}" # #----------------------------------------------------------------------- # -{ cat << EOM >> $SCRIPT_VAR_DEFNS_FP +{ cat << EOM >> ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -1705,6 +2191,16 @@ done <<< "${line_list}" #----------------------------------------------------------------------- # +# +#----------------------------------------------------------------------- +# +# Workflow launcher script and cron table line. 
+# +#----------------------------------------------------------------------- +# +WFLOW_LAUNCH_SCRIPT_FP="${WFLOW_LAUNCH_SCRIPT_FP}" +WFLOW_LAUNCH_LOG_FP="${WFLOW_LAUNCH_LOG_FP}" +CRONTAB_LINE="${CRONTAB_LINE}" # #----------------------------------------------------------------------- # @@ -1718,6 +2214,7 @@ SCRIPTSDIR="$SCRIPTSDIR" JOBSDIR="$JOBSDIR" SORCDIR="$SORCDIR" PARMDIR="$PARMDIR" +MODULES_DIR="${MODULES_DIR}" EXECDIR="$EXECDIR" FIXrrfs="$FIXrrfs" FIXam="$FIXam" @@ -1727,8 +2224,9 @@ FIXupp="$FIXupp" FIXgsd="$FIXgsd" COMROOT="$COMROOT" TEMPLATE_DIR="${TEMPLATE_DIR}" +UFS_WTHR_MDL_DIR="${UFS_WTHR_MDL_DIR}" UFS_UTILS_DIR="${UFS_UTILS_DIR}" -NEMSfv3gfs_DIR="${NEMSfv3gfs_DIR}" +CHGRES_DIR="${CHGRES_DIR}" SFC_CLIMO_INPUT_DIR="${SFC_CLIMO_INPUT_DIR}" EXPTDIR="$EXPTDIR" @@ -1743,8 +2241,48 @@ SFC_CLIMO_DIR="${SFC_CLIMO_DIR}" # #----------------------------------------------------------------------- # -SCRIPT_VAR_DEFNS_FP="${SCRIPT_VAR_DEFNS_FP}" -WRTCMP_PARAMS_TEMPLATE_FP="${WRTCMP_PARAMS_TEMPLATE_FP}" +GLOBAL_VAR_DEFNS_FP="${GLOBAL_VAR_DEFNS_FP}" + +DATA_TABLE_TMPL_FN="${DATA_TABLE_TMPL_FN}" +DIAG_TABLE_TMPL_FN="${DIAG_TABLE_TMPL_FN}" +FIELD_TABLE_TMPL_FN="${FIELD_TABLE_TMPL_FN}" +FV3_NML_TMPL_FN="${FV3_NML_TMPL_FN}" +MODEL_CONFIG_TMPL_FN="${MODEL_CONFIG_TMPL_FN}" +NEMS_CONFIG_TMPL_FN="${NEMS_CONFIG_TMPL_FN}" + +DATA_TABLE_TMPL_FP="${DATA_TABLE_TMPL_FP}" +DIAG_TABLE_TMPL_FP="${DIAG_TABLE_TMPL_FP}" +FIELD_TABLE_TMPL_FP="${FIELD_TABLE_TMPL_FP}" +FV3_NML_TMPL_FP="${FV3_NML_TMPL_FP}" +MODEL_CONFIG_TMPL_FP="${MODEL_CONFIG_TMPL_FP}" +NEMS_CONFIG_TMPL_FP="${NEMS_CONFIG_TMPL_FP}" + +CCPP_PHYS_SUITE_FN="${CCPP_PHYS_SUITE_FN}" +CCPP_PHYS_SUITE_IN_CCPP_FP="${CCPP_PHYS_SUITE_IN_CCPP_FP}" +CCPP_PHYS_SUITE_FP="${CCPP_PHYS_SUITE_FP}" + +DATA_TABLE_FP="${DATA_TABLE_FP}" +FIELD_TABLE_FP="${FIELD_TABLE_FP}" +FV3_NML_FP="${FV3_NML_FP}" +NEMS_CONFIG_FP="${NEMS_CONFIG_FP}" + +WRTCMP_PARAMS_TMPL_FP="${WRTCMP_PARAMS_TMPL_FP}" +# 
+#----------------------------------------------------------------------- +# +# Names of the tasks in the rocoto workflow XML. +# +#----------------------------------------------------------------------- +# +MAKE_GRID_TN="${MAKE_GRID_TN}" +MAKE_OROG_TN="${MAKE_OROG_TN}" +MAKE_SFC_CLIMO_TN="${MAKE_SFC_CLIMO_TN}" +GET_EXTRN_ICS_TN="${GET_EXTRN_ICS_TN}" +GET_EXTRN_LBCS_TN="${GET_EXTRN_LBCS_TN}" +MAKE_ICS_TN="${MAKE_ICS_TN}" +MAKE_LBCS_TN="${MAKE_LBCS_TN}" +RUN_FCST_TN="${RUN_FCST_TN}" +RUN_POST_TN="${RUN_POST_TN}" # #----------------------------------------------------------------------- # @@ -1753,13 +2291,28 @@ WRTCMP_PARAMS_TEMPLATE_FP="${WRTCMP_PARAMS_TEMPLATE_FP}" # #----------------------------------------------------------------------- # -gtype="$gtype" +GTYPE="$GTYPE" TILE_RGNL="${TILE_RGNL}" -nh0_T7="$nh0_T7" -nh3_T7="$nh3_T7" -nh4_T7="$nh4_T7" +NH0="${NH0}" +NH3="${NH3}" +NH4="${NH4}" + +LON_CTR="${LON_CTR}" +LAT_CTR="${LAT_CTR}" +NX="${NX}" +NY="${NY}" +NHW="${NHW}" +STRETCH_FAC="${STRETCH_FAC}" + +RES_IN_FIXSAR_FILENAMES="${RES_IN_FIXSAR_FILENAMES}" +# +# If running the make_grid task, CRES will be set to a null string du- +# the grid generation step. It will later be set to an actual value af- +# ter the make_grid task is complete. +# +CRES="$CRES" EOM -} || print_err_msg_exit "${script_name}" "\ +} || print_err_msg_exit "\ Heredoc (cat) command to append new variable definitions to variable definitions file returned with a nonzero status." # @@ -1772,7 +2325,7 @@ definitions file returned with a nonzero status." 
# if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - { cat << EOM >> $SCRIPT_VAR_DEFNS_FP + { cat << EOM >> ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # @@ -1786,22 +2339,18 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then # #----------------------------------------------------------------------- # -nhw_T7="$nhw_T7" -nx_T7="$nx_T7" -ny_T7="$ny_T7" -istart_rgnl_wide_halo_T6SG="$istart_rgnl_wide_halo_T6SG" -iend_rgnl_wide_halo_T6SG="$iend_rgnl_wide_halo_T6SG" -jstart_rgnl_wide_halo_T6SG="$jstart_rgnl_wide_halo_T6SG" -jend_rgnl_wide_halo_T6SG="$jend_rgnl_wide_halo_T6SG" -CRES="$CRES" +ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}" +IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}" +JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}" +JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}" EOM -} || print_err_msg_exit "${script_name}" "\ +} || print_err_msg_exit "\ Heredoc (cat) command to append grid parameters to variable definitions file returned with a nonzero status." elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - { cat << EOM >> $SCRIPT_VAR_DEFNS_FP + { cat << EOM >> ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # @@ -1812,20 +2361,12 @@ elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then # #----------------------------------------------------------------------- # -del_angle_x_SG="$del_angle_x_SG" -del_angle_y_SG="$del_angle_y_SG" -mns_nx_T7_pls_wide_halo="$mns_nx_T7_pls_wide_halo" -mns_ny_T7_pls_wide_halo="$mns_ny_T7_pls_wide_halo" -# -# The following variables must be set in order to be able to use the -# same scripting machinary for the case of GRID_GEN_METHOD set to "JP- -# grid" as for GRID_GEN_METHOD set to "GFDLgrid". -# -RES="" # This will be set after the grid generation task is complete. 
-CRES="" # This will be set after the grid generation task is complete. -stretch_fac="$stretch_fac" +DEL_ANGLE_X_SG="${DEL_ANGLE_X_SG}" +DEL_ANGLE_Y_SG="${DEL_ANGLE_Y_SG}" +NEG_NX_OF_DOM_WITH_WIDE_HALO="${NEG_NX_OF_DOM_WITH_WIDE_HALO}" +NEG_NY_OF_DOM_WITH_WIDE_HALO="${NEG_NY_OF_DOM_WITH_WIDE_HALO}" EOM -} || print_err_msg_exit "${script_name}" "\ +} || print_err_msg_exit "\ Heredoc (cat) command to append grid parameters to variable definitions file returned with a nonzero status." @@ -1838,7 +2379,7 @@ fi # #----------------------------------------------------------------------- # -{ cat << EOM >> $SCRIPT_VAR_DEFNS_FP +{ cat << EOM >> ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # @@ -1893,10 +2434,10 @@ LBC_UPDATE_FCST_HRS=(${LBC_UPDATE_FCST_HRS[@]}) # LBC_UPDATE_FCST_HRS is an arr # #----------------------------------------------------------------------- # -ncores_per_node="$ncores_per_node" -PE_MEMBER01="$PE_MEMBER01" +NCORES_PER_NODE="${NCORES_PER_NODE}" +PE_MEMBER01="${PE_MEMBER01}" EOM -} || print_err_msg_exit "${script_name}" "\ +} || print_err_msg_exit "\ Heredoc (cat) command to append new variable definitions to variable definitions file returned with a nonzero status." # @@ -1906,8 +2447,7 @@ definitions file returned with a nonzero status." # #----------------------------------------------------------------------- # -print_info_msg "\ - +print_info_msg " ======================================================================== Setup script completed successfully!!! ========================================================================" @@ -1920,6 +2460,13 @@ Setup script completed successfully!!! # { restore_shell_opts; } > /dev/null 2>&1 - - +} +# +#----------------------------------------------------------------------- +# +# Call the function defined above. 
+# +#----------------------------------------------------------------------- +# +setup diff --git a/ush/source_funcs.sh b/ush/source_util_funcs.sh similarity index 51% rename from ush/source_funcs.sh rename to ush/source_util_funcs.sh index 2771fa649a..44417f0fe2 100644 --- a/ush/source_funcs.sh +++ b/ush/source_util_funcs.sh @@ -1,12 +1,33 @@ +function source_util_funcs() { # #----------------------------------------------------------------------- # -# Set the location to look for the sourced function definition files. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). # #----------------------------------------------------------------------- # -#FUNCS_DIR=${USHDIR:-"."} -FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Set the directory in which the files defining the various utility +# functions are located. +# +#----------------------------------------------------------------------- +# + local bashutils_dir="${scrfunc_dir}/bash_utils" # #----------------------------------------------------------------------- # @@ -15,7 +36,7 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # -. ${FUNCS_DIR}/save_restore_shell_opts.sh + . 
${bashutils_dir}/save_restore_shell_opts.sh # #----------------------------------------------------------------------- # @@ -23,7 +44,15 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # -. ${FUNCS_DIR}/print_msg.sh + . ${bashutils_dir}/print_msg.sh +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# + . ${bashutils_dir}/set_bash_param.sh # #----------------------------------------------------------------------- # @@ -33,7 +62,15 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # -. ${FUNCS_DIR}/set_file_param.sh + . ${bashutils_dir}/set_file_param.sh +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# + . ${bashutils_dir}/count_files.sh # #----------------------------------------------------------------------- # @@ -44,7 +81,7 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # -. ${FUNCS_DIR}/check_for_preexist_dir.sh + . ${bashutils_dir}/check_for_preexist_dir.sh # #----------------------------------------------------------------------- # @@ -54,7 +91,7 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # -. ${FUNCS_DIR}/filesys_cmds_vrfy.sh + . ${bashutils_dir}/filesys_cmds_vrfy.sh # #----------------------------------------------------------------------- # @@ -63,7 +100,16 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # -. ${FUNCS_DIR}/iselementof.sh + . 
${bashutils_dir}/is_element_of.sh +# +#----------------------------------------------------------------------- +# +# Source the file containing the function that gets the indices of those +# elements of an array that match a given string. +# +#----------------------------------------------------------------------- +# + . ${bashutils_dir}/get_elem_inds.sh # #----------------------------------------------------------------------- # @@ -72,7 +118,7 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # -. ${FUNCS_DIR}/is_array.sh + . ${bashutils_dir}/is_array.sh # #----------------------------------------------------------------------- # @@ -82,7 +128,24 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # -. ${FUNCS_DIR}/interpol_to_arbit_CRES.sh + . ${bashutils_dir}/interpol_to_arbit_CRES.sh +# +#----------------------------------------------------------------------- +# +# Source the file containing the function that checks the validity of a +# variable's value (given a set of valid values). +# +#----------------------------------------------------------------------- +# + . ${bashutils_dir}/check_var_valid_value.sh +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# + . ${bashutils_dir}/print_input_args.sh # #----------------------------------------------------------------------- # @@ -92,5 +155,17 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # -. ${FUNCS_DIR}/process_args.sh + . ${bashutils_dir}/process_args.sh +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# + . 
${bashutils_dir}/get_manage_externals_config_property.sh + +} +source_util_funcs + diff --git a/ush/templates/FV3SAR_wflow.xml b/ush/templates/FV3SAR_wflow.xml index 7985e9c695..a6605d939f 100644 --- a/ush/templates/FV3SAR_wflow.xml +++ b/ush/templates/FV3SAR_wflow.xml @@ -47,9 +47,22 @@ The following are variables that are passed to the shell scripts that execute the various workflow tasks but are not otherwise used in the workflow XML. --> - + + + + + + + + + + + + + ]> @@ -85,23 +104,46 @@ Variables that are not modified by the workflow generation script. &DATE_FIRST_CYCL;CC00 &DATE_LAST_CYCL;CC00 24:00:00 - &LOGDIR;/FV3_wflow.log + &LOGDIR;/FV3SAR_wflow.log - + &RSRC_MAKE_GRID; &RSRV_DEFAULT; - - &JOBSDIR;/JREGIONAL_MAKE_GRID + + + + &LOAD_MODULES_RUN_TASK_FP; "&MAKE_GRID_TN;" "&JOBSDIR;/JREGIONAL_MAKE_GRID" &PROC_MAKE_GRID; - make_grid - &LOGDIR;/make_grid.log + &MAKE_GRID_TN; + &LOGDIR;/&MAKE_GRID_TN;.log - SCRIPT_VAR_DEFNS_FP&SCRIPT_VAR_DEFNS_FP; + GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; PDY@Y@m@d @@ -113,17 +155,17 @@ Variables that are not modified by the workflow generation script. ************************************************************************ ************************************************************************ --> - + &RSRC_MAKE_OROG; &RSRV_DEFAULT; - &JOBSDIR;/JREGIONAL_MAKE_OROG + &LOAD_MODULES_RUN_TASK_FP; "&MAKE_OROG_TN;" "&JOBSDIR;/JREGIONAL_MAKE_OROG" &PROC_MAKE_OROG; - make_orog - &LOGDIR;/make_orog.log + &MAKE_OROG_TN; + &LOGDIR;/&MAKE_OROG_TN;.log - SCRIPT_VAR_DEFNS_FP&SCRIPT_VAR_DEFNS_FP; + GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; PDY@Y@m@d @@ -142,17 +184,17 @@ Variables that are not modified by the workflow generation script. 
************************************************************************ ************************************************************************ --> - + &RSRC_MAKE_SFC_CLIMO; &RSRV_DEFAULT; - &JOBSDIR;/JREGIONAL_MAKE_SFC_CLIMO + &LOAD_MODULES_RUN_TASK_FP; "&MAKE_SFC_CLIMO_TN;" "&JOBSDIR;/JREGIONAL_MAKE_SFC_CLIMO" &PROC_MAKE_SFC_CLIMO; - make_sfc_climo - &LOGDIR;/make_sfc_climo.log + &MAKE_SFC_CLIMO_TN; + &LOGDIR;/&MAKE_SFC_CLIMO_TN;.log - SCRIPT_VAR_DEFNS_FP&SCRIPT_VAR_DEFNS_FP; + GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; PDY@Y@m@d @@ -164,8 +206,8 @@ Variables that are not modified by the workflow generation script. &RUN_TASK_MAKE_GRID;FALSE - - &LOGDIR;/make_orog_task_complete.txt + + &LOGDIR;/&MAKE_OROG_TN;_task_complete.txt &RUN_TASK_MAKE_OROG;FALSE @@ -176,17 +218,17 @@ Variables that are not modified by the workflow generation script. ************************************************************************ ************************************************************************ --> - + &RSRC_GET_EXTRN_MDL_FILES; &RSRV_HPSS; - &JOBSDIR;/JREGIONAL_GET_EXTRN_FILES + &LOAD_MODULES_RUN_TASK_FP; "&GET_EXTRN_ICS_TN;" "&JOBSDIR;/JREGIONAL_GET_EXTRN_FILES" &PROC_GET_EXTRN_MDL_FILES; - get_files_ICS - &LOGDIR;/get_files_ICS_@Y@m@d@H.log + &GET_EXTRN_ICS_TN; + &LOGDIR;/&GET_EXTRN_ICS_TN;_@Y@m@d@H.log - SCRIPT_VAR_DEFNS_FP&SCRIPT_VAR_DEFNS_FP; + GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; CYCLE_DIR&CYCLE_DIR; CDATE@Y@m@d@H PDY@Y@m@d @@ -198,17 +240,17 @@ Variables that are not modified by the workflow generation script. 
************************************************************************ ************************************************************************ --> - + &RSRC_GET_EXTRN_MDL_FILES; &RSRV_HPSS; - &JOBSDIR;/JREGIONAL_GET_EXTRN_FILES + &LOAD_MODULES_RUN_TASK_FP; "&GET_EXTRN_LBCS_TN;" "&JOBSDIR;/JREGIONAL_GET_EXTRN_FILES" &PROC_GET_EXTRN_MDL_FILES; - get_files_LBCS - &LOGDIR;/get_files_LBCS_@Y@m@d@H.log + &GET_EXTRN_LBCS_TN; + &LOGDIR;/&GET_EXTRN_LBCS_TN;_@Y@m@d@H.log - SCRIPT_VAR_DEFNS_FP&SCRIPT_VAR_DEFNS_FP; + GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; CYCLE_DIR&CYCLE_DIR; CDATE@Y@m@d@H PDY@Y@m@d @@ -220,37 +262,37 @@ Variables that are not modified by the workflow generation script. ************************************************************************ ************************************************************************ --> - + &RSRC_MAKE_ICS_SURF_LBC0; &RSRV_DEFAULT; - &JOBSDIR;/JREGIONAL_MAKE_IC_LBC0 + &LOAD_MODULES_RUN_TASK_FP; "&MAKE_ICS_TN;" "&JOBSDIR;/JREGIONAL_MAKE_ICS" &PROC_MAKE_ICS_SURF_LBC0; - make_ICS_surf_LBC0 - &LOGDIR;/make_ICS_surf_LBC0_@Y@m@d@H.log + &MAKE_ICS_TN; + &LOGDIR;/&MAKE_ICS_TN;_@Y@m@d@H.log - SCRIPT_VAR_DEFNS_FP&SCRIPT_VAR_DEFNS_FP; + GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; CYCLE_DIR&CYCLE_DIR; CDATE@Y@m@d@H PDY@Y@m@d - + &LOGDIR;/make_grid_task_complete.txt &RUN_TASK_MAKE_GRID;FALSE - - &LOGDIR;/make_orog_task_complete.txt + + &LOGDIR;/&MAKE_OROG_TN;_task_complete.txt &RUN_TASK_MAKE_OROG;FALSE - - &LOGDIR;/make_sfc_climo_task_complete.txt + + &LOGDIR;/&MAKE_SFC_CLIMO_TN;_task_complete.txt &RUN_TASK_MAKE_SFC_CLIMO;FALSE @@ -261,37 +303,37 @@ Variables that are not modified by the workflow generation script. 
************************************************************************ ************************************************************************ --> - + &RSRC_MAKE_LBC1_TO_LBCN; &RSRV_DEFAULT; - &JOBSDIR;/JREGIONAL_MAKE_LBC1_TO_LBCN + &LOAD_MODULES_RUN_TASK_FP; "&MAKE_LBCS_TN;" "&JOBSDIR;/JREGIONAL_MAKE_LBCS" &PROC_MAKE_LBC1_TO_LBCN; - make_LBC1_to_LBCN - &LOGDIR;/make_LBC1_to_LBCN_@Y@m@d@H.log + &MAKE_LBCS_TN; + &LOGDIR;/&MAKE_LBCS_TN;_@Y@m@d@H.log - SCRIPT_VAR_DEFNS_FP&SCRIPT_VAR_DEFNS_FP; + GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; CYCLE_DIR&CYCLE_DIR; CDATE@Y@m@d@H PDY@Y@m@d - + &LOGDIR;/make_grid_task_complete.txt &RUN_TASK_MAKE_GRID;FALSE - - &LOGDIR;/make_orog_task_complete.txt + + &LOGDIR;/&MAKE_OROG_TN;_task_complete.txt &RUN_TASK_MAKE_OROG;FALSE - - &LOGDIR;/make_sfc_climo_task_complete.txt + + &LOGDIR;/&MAKE_SFC_CLIMO_TN;_task_complete.txt &RUN_TASK_MAKE_SFC_CLIMO;FALSE @@ -302,25 +344,25 @@ Variables that are not modified by the workflow generation script. ************************************************************************ ************************************************************************ --> - + - &RSRC_RUN_FV3; - &RSRV_RUN_FV3; + &RSRC_RUN_FCST; + &RSRV_RUN_FCST; - &JOBSDIR;/JREGIONAL_RUN_FV3 - &PROC_RUN_FV3; - run_FV3 - &LOGDIR;/run_FV3_@Y@m@d@H.log + &LOAD_MODULES_RUN_TASK_FP; "&RUN_FCST_TN;" "&JOBSDIR;/JREGIONAL_RUN_FCST" + &PROC_RUN_FCST; + &RUN_FCST_TN; + &LOGDIR;/&RUN_FCST_TN;_@Y@m@d@H.log - SCRIPT_VAR_DEFNS_FP&SCRIPT_VAR_DEFNS_FP; + GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; CYCLE_DIR&CYCLE_DIR; CDATE@Y@m@d@H PDY@Y@m@d - - + + @@ -329,21 +371,21 @@ Variables that are not modified by the workflow generation script. 
************************************************************************ ************************************************************************ --> - + &FHR; - + &RSRC_POST; &RSRV_DEFAULT; - &JOBSDIR;/JREGIONAL_RUN_POST + &LOAD_MODULES_RUN_TASK_FP; "&RUN_POST_TN;" "&JOBSDIR;/JREGIONAL_RUN_POST" &PROC_POST; - run_post_#fhr# - &LOGDIR;/run_post_#fhr#_@Y@m@d@H.log + &RUN_POST_TN;_#fhr# + &LOGDIR;/&RUN_POST_TN;_#fhr#_@Y@m@d@H.log - SCRIPT_VAR_DEFNS_FP&SCRIPT_VAR_DEFNS_FP; + GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; CYCLE_DIR&CYCLE_DIR; CDATE@Y@m@d@H PDY@Y@m@d diff --git a/ush/templates/diag_table b/ush/templates/diag_table.FV3_GFS_2017_gfdlmp similarity index 100% rename from ush/templates/diag_table rename to ush/templates/diag_table.FV3_GFS_2017_gfdlmp diff --git a/ush/templates/diag_table_ccpp_gsd b/ush/templates/diag_table.FV3_GSD_SAR similarity index 97% rename from ush/templates/diag_table_ccpp_gsd rename to ush/templates/diag_table.FV3_GSD_SAR index d19a509c45..f86aca2a61 100644 --- a/ush/templates/diag_table_ccpp_gsd +++ b/ush/templates/diag_table.FV3_GSD_SAR @@ -211,15 +211,15 @@ "gfs_sfc", "soilw7", "soilw7" "fv3_history2d", "all", .false., "none", 2 "gfs_sfc", "soilw8", "soilw8" "fv3_history2d", "all", .false., "none", 2 "gfs_sfc", "soilw9", "soilw9" "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soill1", "soill1", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soill2", "soill2", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soill3", "soill3", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soill4", "soill4", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soill5", "soill5", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soill6", "soill6", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soill7", "soill7", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soill8", "soill8", "fv3_history2d", "all", .false., "none", 2 -"gfs_sfc", "soill9", "soill9", "fv3_history2d", "all", .false., "none", 2 
+"gfs_sfc", "slc_1", "soill1", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_2", "soill2", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_3", "soill3", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_4", "soill4", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_5", "soill5", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_6", "soill6", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_7", "soill7", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_8", "soill8", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_9", "soill9", "fv3_history2d", "all", .false., "none", 2 "gfs_sfc", "slope", "sltyp", "fv3_history2d", "all", .false., "none", 2 "gfs_sfc", "alnsf", "alnsf", "fv3_history2d", "all", .false., "none", 2 "gfs_sfc", "alnwf", "alnwf", "fv3_history2d", "all", .false., "none", 2 diff --git a/ush/templates/diag_table.FV3_GSD_v0 b/ush/templates/diag_table.FV3_GSD_v0 new file mode 100644 index 0000000000..f86aca2a61 --- /dev/null +++ b/ush/templates/diag_table.FV3_GSD_v0 @@ -0,0 +1,355 @@ +.Z..32bit.non-hydro.regional +
0 0 + +"grid_spec", -1, "months", 1, "days", "time" +"atmos_static", -1, "hours", 1, "hours", "time" +#"atmos_4xdaily", 1, "hours", 1, "days", "time" +"fv3_history", 0, "hours", 1, "hours", "time" +"fv3_history2d", 0, "hours", 1, "hours", "time" + +# +#======================= +# ATMOSPHERE DIAGNOSTICS +#======================= +### +# grid_spec +### + "dynamics", "grid_lon", "grid_lon", "grid_spec", "all", .false., "none", 2, + "dynamics", "grid_lat", "grid_lat", "grid_spec", "all", .false., "none", 2, + "dynamics", "grid_lont", "grid_lont", "grid_spec", "all", .false., "none", 2, + "dynamics", "grid_latt", "grid_latt", "grid_spec", "all", .false., "none", 2, + "dynamics", "area", "area", "grid_spec", "all", .false., "none", 2, +### +# 4x daily output +### +# "dynamics", "slp", "slp", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "vort850", "vort850", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "vort200", "vort200", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "us", "us", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "u1000", "u1000", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "u850", "u850", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "u700", "u700", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "u500", "u500", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "u200", "u200", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "u100", "u100", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "u50", "u50", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "u10", "u10", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "vs", "vs", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "v1000", "v1000", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "v850", "v850", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "v700", "v700", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "v500", "v500", 
"atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "v200", "v200", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "v100", "v100", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "v50", "v50", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "v10", "v10", "atmos_4xdaily", "all", .false., "none", 2 +#### +# "dynamics", "tm", "tm", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "t1000", "t1000", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "t850", "t850", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "t700", "t700", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "t500", "t500", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "t200", "t200", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "t100", "t100", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "t50", "t50", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "t10", "t10", "atmos_4xdaily", "all", .false., "none", 2 +#### +# "dynamics", "z1000", "z1000", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "z850", "z850", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "z700", "z700", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "z500", "z500", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "z200", "z200", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "z100", "z100", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "z50", "z50", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "z10", "z10", "atmos_4xdaily", "all", .false., "none", 2 +#### +# "dynamics", "w1000", "w1000", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "w850", "w850", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "w700", "w700", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "w500", "w500", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "w200", "w200", "atmos_4xdaily", "all", .false., "none", 2 +#### +# "dynamics", 
"q1000", "q1000", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "q850", "q850", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "q700", "q700", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "q500", "q500", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "q200", "q200", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "q100", "q100", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "q50", "q50", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "q10", "q10", "atmos_4xdaily", "all", .false., "none", 2 +#### +# "dynamics", "rh1000", "rh1000", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "rh850", "rh850", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "rh700", "rh700", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "rh500", "rh500", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "rh200", "rh200", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "omg1000", "omg1000", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "omg850", "omg850", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "omg700", "omg700", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "omg500", "omg500", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "omg200", "omg200", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "omg100", "omg100", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "omg50", "omg50", "atmos_4xdaily", "all", .false., "none", 2 +# "dynamics", "omg10", "omg10", "atmos_4xdaily", "all", .false., "none", 2 +### +# gfs static data +### + "dynamics", "pk", "pk", "atmos_static", "all", .false., "none", 2 + "dynamics", "bk", "bk", "atmos_static", "all", .false., "none", 2 + "dynamics", "hyam", "hyam", "atmos_static", "all", .false., "none", 2 + "dynamics", "hybm", "hybm", "atmos_static", "all", .false., "none", 2 + "dynamics", "zsurf", "zsurf", "atmos_static", "all", .false., "none", 2 +### +# FV3 variabls needed for 
NGGPS evaluation +### +"gfs_dyn", "ucomp", "ugrd", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "vcomp", "vgrd", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "sphum", "spfh", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "temp", "tmp", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "liq_wat", "clwmr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "o3mr", "o3mr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "delp", "dpres", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "delz", "delz", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "w", "dzdt", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "ice_wat", "icmr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "rainwat", "rwmr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "snowwat", "snmr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "graupel", "grle", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "ps", "pressfc", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2 +"gfs_phys", "refl_10cm" "refl_10cm" "fv3_history", "all", .false., "none", 2 +#"gfs_dyn", "ice_nc", "nicp", "fv3_history", "all", .false., "none", 2 +#"gfs_dyn", "rain_nc", "ntrnc", "fv3_history", "all", .false., "none", 2 + +"gfs_dyn", "wmaxup", "upvvelmax", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "wmaxdn", "dnvvelmax", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "uhmax03", "uhmax03", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "uhmax25", "uhmax25", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "uhmin03", "uhmin03", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "uhmin25", "uhmin25", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "maxvort01", "maxvort01", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "maxvort02", "maxvort02", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "maxvorthy1", "maxvorthy1", "fv3_history", "all", .false., "none", 2 + 
+"gfs_phys", "ALBDO_ave", "albdo_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cnvprcp_ave", "cprat_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cnvprcpb_ave", "cpratb_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "totprcp_ave", "prate_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "totprcpb_ave", "prateb_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DLWRF", "dlwrf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DLWRFI", "dlwrf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ULWRF", "ulwrf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ULWRFI", "ulwrf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DSWRF", "dswrf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DSWRFI", "dswrf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "USWRF", "uswrf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "USWRFI", "uswrf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "DSWRFtoa", "dswrf_avetoa","fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "USWRFtoa", "uswrf_avetoa","fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ULWRFtoa", "ulwrf_avetoa","fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "gflux_ave", "gflux_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "hpbl", "hpbl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "lhtfl_ave", "lhtfl_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "shtfl_ave", "shtfl_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "pwat", "pwatclm", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "soilm", "soilm", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDC_aveclm", "tcdc_aveclm", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDC_avebndcl", "tcdc_avebndcl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDC_avehcl", "tcdc_avehcl", "fv3_history2d", "all", 
.false., "none", 2 +"gfs_phys", "TCDC_avelcl", "tcdc_avelcl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDC_avemcl", "tcdc_avemcl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TCDCcnvcl", "tcdccnvcl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PREScnvclt", "prescnvclt", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PREScnvclb", "prescnvclb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avehct", "pres_avehct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avehcb", "pres_avehcb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TEMP_avehct", "tmp_avehct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avemct", "pres_avemct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avemcb", "pres_avemcb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TEMP_avemct", "tmp_avemct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avelct", "pres_avelct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "PRES_avelcb", "pres_avelcb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "TEMP_avelct", "tmp_avelct", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "u-gwd_ave", "u-gwd_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "v-gwd_ave", "v-gwd_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "dusfc", "uflx_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "dvsfc", "vflx_ave", "fv3_history2d", "all", .false., "none", 2 +#"gfs_phys", "cnvw", "cnvcldwat", "fv3_history2d", "all", .false., "none", 2 + +"gfs_phys", "psurf", "pressfc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "u10m", "ugrd10m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "v10m", "vgrd10m", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "crain", "crain", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tprcp", "tprcp", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", 
"hgtsfc", "orog", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "weasd", "weasd", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "f10m", "f10m", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "q2m", "spfh2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "t2m", "tmp2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tsfc", "tmpsfc", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "vtype", "vtype", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "stype", "sotyp", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slmsksfc", "land", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "vfracsfc", "veg", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "zorlsfc", "sfcr", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "uustar", "fricv", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt1", "soilt1" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt2", "soilt2" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt3", "soilt3" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt4", "soilt4" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt5", "soilt5" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt6", "soilt6" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt7", "soilt7" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt8", "soilt8" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt9", "soilt9" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw1", "soilw1" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw2", "soilw2" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw3", "soilw3" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw4", "soilw4" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw5", "soilw5" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw6", "soilw6" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw7", 
"soilw7" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw8", "soilw8" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw9", "soilw9" "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_1", "soill1", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_2", "soill2", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_3", "soill3", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_4", "soill4", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_5", "soill5", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_6", "soill6", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_7", "soill7", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_8", "soill8", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_9", "soill9", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slope", "sltyp", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alnsf", "alnsf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alnwf", "alnwf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alvsf", "alvsf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alvwf", "alvwf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "canopy", "cnwat", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "facsf", "facsf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "facwf", "facwf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "ffhh", "ffhh", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "ffmm", "ffmm", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "fice", "icec", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "hice", "icetk", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "snoalb", "snoalb", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "shdmax", "shdmax", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "shdmin", "shdmin", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "snowd", "snod", 
"fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tg3", "tg3", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tisfc", "tisfc", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tref", "tref", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "z_c", "zc", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "c_0", "c0", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "c_d", "cd", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "w_0", "w0", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "w_d", "wd", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xt", "xt", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xz", "xz", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "dt_cool", "dtcool", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xs", "xs", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xu", "xu", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xv", "xv", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xtts", "xtts", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "xzts", "xzts", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "d_conv", "dconv", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "qrain", "qrain", "fv3_history2d", "all", .false., "none", 2 + +"gfs_phys", "acond", "acond", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cduvb_ave", "cduvb_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cpofp", "cpofp", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "duvb_ave", "duvb_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csdlf_ave", "csdlf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csusf_ave", "csusf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csusf_avetoa", "csusftoa", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csdsf_ave", "csdsf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "csulf_ave", "csulf", "fv3_history2d", "all", .false., 
"none", 2 +"gfs_phys", "csulf_avetoa", "csulftoa", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cwork_ave", "cwork_aveclm", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "evbs_ave", "evbs_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "evcw_ave", "evcw_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "fldcp", "fldcp", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "hgt_hyblev1", "hgt_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "spfh_hyblev1", "spfh_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ugrd_hyblev1", "ugrd_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "vgrd_hyblev1", "vgrd_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tmp_hyblev1", "tmp_hyblev1", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "gfluxi", "gflux", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "lhtfl", "lhtfl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "shtfl", "shtfl", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "pevpr", "pevpr", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "pevpr_ave", "pevpr_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "sbsno_ave", "sbsno_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "sfexc", "sfexc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "snohf", "snohf", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "snowc_ave", "snowc_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "spfhmax2m", "spfhmax_max2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "spfhmin2m", "spfhmin_min2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tmpmax2m", "tmax_max2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tmpmin2m", "tmin_min2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "ssrun_acc", "ssrun_acc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "sunsd_acc", "sunsd_acc", 
"fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "watr_acc", "watr_acc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "wilt", "wilt", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "vbdsf_ave", "vbdsf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "vddsf_ave", "vddsf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "nbdsf_ave", "nbdsf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "nddsf_ave", "nddsf_ave", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "trans_ave", "trans_ave", "fv3_history2d", "all", .false., "none", 2 +# Aerosols (CCN, IN) from Thompson microphysics +"gfs_phys", "nwfa", "nwfa", "fv3_history", "all", .false., "none", 2 +"gfs_phys", "nifa", "nifa", "fv3_history", "all", .false., "none", 2 +"gfs_sfc", "nwfa2d", "nwfa2d", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "nifa2d", "nifa2d", "fv3_history2d", "all", .false., "none", 2 +# Cloud effective radii from Thompson and WSM6 microphysics +"gfs_phys", "cleffr", "cleffr", "fv3_history", "all", .false., "none", 2 +"gfs_phys", "cieffr", "cieffr", "fv3_history", "all", .false., "none", 2 +"gfs_phys", "cseffr", "cseffr", "fv3_history", "all", .false., "none", 2 +# Prognostic/diagnostic variables from MYNN +"gfs_phys", "QC_BL", "qc_bl", "fv3_history", "all", .false., "none", 2 +"gfs_phys", "CLDFRA_BL", "cldfra_bl", "fv3_history", "all", .false., "none", 2 +"gfs_phys", "EL_PBL", "el_pbl", "fv3_history", "all", .false., "none", 2 +"gfs_phys", "QKE", "qke", "fv3_history", "all", .false., "none", 2 +"gfs_sfc", "maxmf", "maxmf", "fv3_history2d", "all", .false., "none", 2 +#"gfs_sfc", "nupdraft", "nupdrafts", "fv3_history2d", "all", .false., "none", 2 +#"gfs_sfc", "ktop_shallow", "ktop_shallow", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "zol", "zol", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "flhc", "flhc", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "flqc", "flqc", "fv3_history2d", "all", 
.false., "none", 2 +# Prognostic/diagnostic variables from RUC LSM +"gfs_sfc", "snowfall_acc", "snowfall_acc", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "swe_snowfall_acc", "swe_snowfall_acc", "fv3_history2d", "all", .false., "none", 2 + +#============================================================================================= +# +#====> This file can be used with diag_manager/v2.0a (or higher) <==== +# +# +# FORMATS FOR FILE ENTRIES (not all input values are used) +# ------------------------ +# +#"file_name", output_freq, "output_units", format, "time_units", "long_name", +# +# +#output_freq: > 0 output frequency in "output_units" +# = 0 output frequency every time step +# =-1 output frequency at end of run +# +#output_units = units used for output frequency +# (years, months, days, minutes, hours, seconds) +# +#time_units = units used to label the time axis +# (days, minutes, hours, seconds) +# +# +# FORMAT FOR FIELD ENTRIES (not all input values are used) +# ------------------------ +# +#"module_name", "field_name", "output_name", "file_name" "time_sampling", time_avg, "other_opts", packing +# +#time_avg = .true. or .false. +# +#packing = 1 double precision +# = 2 float +# = 4 packed 16-bit integers +# = 8 packed 1-byte (not tested?) 
diff --git a/ush/templates/field_table b/ush/templates/field_table.FV3_GFS_2017_gfdlmp similarity index 100% rename from ush/templates/field_table rename to ush/templates/field_table.FV3_GFS_2017_gfdlmp diff --git a/ush/templates/field_table_ccpp_gsd b/ush/templates/field_table.FV3_GSD_SAR similarity index 78% rename from ush/templates/field_table_ccpp_gsd rename to ush/templates/field_table.FV3_GSD_SAR index 18de944916..0a927de455 100644 --- a/ush/templates/field_table_ccpp_gsd +++ b/ush/templates/field_table.FV3_GSD_SAR @@ -9,26 +9,37 @@ "longname", "cloud water mixing ratio" "units", "kg/kg" "profile_type", "fixed", "surface_value=1.e30" / - "TRACER", "atmos_mod", "rainwat" - "longname", "rain mixing ratio" - "units", "kg/kg" - "profile_type", "fixed", "surface_value=1.e30" / +# prognostic ice water mixing ratio "TRACER", "atmos_mod", "ice_wat" "longname", "cloud ice mixing ratio" "units", "kg/kg" "profile_type", "fixed", "surface_value=1.e30" / +# prognostic rain water mixing ratio + "TRACER", "atmos_mod", "rainwat" + "longname", "rain water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic snow water mixing ratio "TRACER", "atmos_mod", "snowwat" - "longname", "snow mixing ratio" + "longname", "snow water mixing ratio" "units", "kg/kg" "profile_type", "fixed", "surface_value=1.e30" / +# prognostic graupel mixing ratio "TRACER", "atmos_mod", "graupel" "longname", "graupel mixing ratio" "units", "kg/kg" "profile_type", "fixed", "surface_value=1.e30" / +# prognostic cloud water number concentration + "TRACER", "atmos_mod", "water_nc" + "longname", "cloud liquid water number concentration" + "units", "/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic cloud ice number concentration "TRACER", "atmos_mod", "ice_nc" - "longname", "ice number concentration" + "longname", "cloud ice water number concentration" "units", "/kg" "profile_type", "fixed", "surface_value=1.e30" / +# prognostic rain number 
concentration "TRACER", "atmos_mod", "rain_nc" "longname", "rain number concentration" "units", "/kg" @@ -38,11 +49,7 @@ "longname", "ozone mixing ratio" "units", "kg/kg" "profile_type", "fixed", "surface_value=1.e30" / -# cloud droplets, water- and ice-friendly aerosols (Thompson) - "TRACER", "atmos_mod", "water_nc" - "longname", "cloud droplet number concentration" - "units", "/kg" - "profile_type", "fixed", "surface_value=0.0" / +# water- and ice-friendly aerosols (Thompson) "TRACER", "atmos_mod", "liq_aero" "longname", "water-friendly aerosol number concentration" "units", "/kg" diff --git a/ush/templates/field_table.FV3_GSD_v0 b/ush/templates/field_table.FV3_GSD_v0 new file mode 100644 index 0000000000..0a927de455 --- /dev/null +++ b/ush/templates/field_table.FV3_GSD_v0 @@ -0,0 +1,65 @@ +# added by FRE: sphum must be present in atmos +# specific humidity for moist runs + "TRACER", "atmos_mod", "sphum" + "longname", "specific humidity" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic cloud water mixing ratio + "TRACER", "atmos_mod", "liq_wat" + "longname", "cloud water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic ice water mixing ratio + "TRACER", "atmos_mod", "ice_wat" + "longname", "cloud ice mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic rain water mixing ratio + "TRACER", "atmos_mod", "rainwat" + "longname", "rain water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic snow water mixing ratio + "TRACER", "atmos_mod", "snowwat" + "longname", "snow water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic graupel mixing ratio + "TRACER", "atmos_mod", "graupel" + "longname", "graupel mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic cloud water number concentration + "TRACER", 
"atmos_mod", "water_nc" + "longname", "cloud liquid water number concentration" + "units", "/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic cloud ice number concentration + "TRACER", "atmos_mod", "ice_nc" + "longname", "cloud ice water number concentration" + "units", "/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic rain number concentration + "TRACER", "atmos_mod", "rain_nc" + "longname", "rain number concentration" + "units", "/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic ozone mixing ratio tracer + "TRACER", "atmos_mod", "o3mr" + "longname", "ozone mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# water- and ice-friendly aerosols (Thompson) + "TRACER", "atmos_mod", "liq_aero" + "longname", "water-friendly aerosol number concentration" + "units", "/kg" + "profile_type", "fixed", "surface_value=0.0" / + "TRACER", "atmos_mod", "ice_aero" + "longname", "ice-friendly aerosol number concentration" + "units", "/kg" + "profile_type", "fixed", "surface_value=0.0" / +# prognostic subgrid scale turbulent kinetic energy + "TRACER", "atmos_mod", "sgs_tke" + "longname", "subgrid scale turbulent kinetic energy" + "units", "m2/s2" + "profile_type", "fixed", "surface_value=1.e30" / diff --git a/ush/templates/input_ccpp_gfsextern_gfsphys.nml b/ush/templates/input.nml.FV3_GFS_2017_gfdlmp old mode 100755 new mode 100644 similarity index 99% rename from ush/templates/input_ccpp_gfsextern_gfsphys.nml rename to ush/templates/input.nml.FV3_GFS_2017_gfdlmp index 877afaab75..8fbbe6b8a0 --- a/ush/templates/input_ccpp_gfsextern_gfsphys.nml +++ b/ush/templates/input.nml.FV3_GFS_2017_gfdlmp @@ -173,8 +173,8 @@ ivegsrc = 1 isot = 1 debug = .false. - oz_phys = .false. - oz_phys_2015 = .true. + oz_phys = .true. + oz_phys_2015 = .false. 
nstf_name = 2,1,1,0,5 iau_delthrs = 6 iaufhrs = 30 diff --git a/ush/templates/input.nml.FV3_GSD_SAR b/ush/templates/input.nml.FV3_GSD_SAR new file mode 100644 index 0000000000..11689577b5 --- /dev/null +++ b/ush/templates/input.nml.FV3_GSD_SAR @@ -0,0 +1,312 @@ +&amip_interp_nml + interp_oi_sst = .true. + use_ncep_sst = .true. + use_ncep_ice = .false. + no_anom_sst = .false. + data_set = 'reynolds_oi', + date_out_of_range = 'climo', +/ + +&atmos_model_nml + blocksize = + chksum_debug = .false. + dycore_only = .false. + fdiag = 1 + ccpp_suite = +/ + +&diag_manager_nml + prepend_date = .F. +/ + +&fms_io_nml + checksum_required = .false. + max_files_r = 100, + max_files_w = 100, +/ + +&fms_nml + clock_grain = 'ROUTINE', + domains_stack_size = 3000000, + print_memory_usage = .false. +/ + +&fv_grid_nml + grid_file = 'INPUT/grid_spec.nc' +/ + +&fv_core_nml + layout = , + io_layout = 1,1 + npx = + npy = + ntiles = 1, + npz = 64 + !grid_type = -1 + make_nh = .T. + fv_debug = .T. + range_warn = .T. + reset_eta = .F. + n_sponge = 24 + nudge_qv = .F. + tau = 5. + rf_cutoff = 20.e2 + d2_bg_k1 = 0.20 + d2_bg_k2 = 0.04 + kord_tm = -11 + kord_mt = 11 + kord_wz = 11 + kord_tr = 11 + hydrostatic = .F. + phys_hydrostatic = .F. + use_hydro_pressure = .F. + beta = 0. + a_imp = 1. + p_fac = 0.1 + k_split = 4 + n_split = 5 + nwat = 6 + na_init = 1 + d_ext = 0.0 + dnats = 0 + fv_sg_adj = 300 + d2_bg = 0. + nord = 2 + dddmp = 0.1 + d4_bg = 0.12 + vtdm4 = 0.02 + ke_bg = 0. + do_vort_damp = .true. + external_ic = .T. + external_eta = .T. + gfs_phil = .false. + nggps_ic = .T. + mountain = .F. + ncep_ic = .F. + d_con = 1.0 + delt_max = 0.002 + hord_mt = 6 + hord_vt = 6 + hord_tm = 6 + hord_dp = -6 + hord_tr = 8 + adjust_dry_mass = .F. + consv_te = 0. + do_sat_adj = .F. + consv_am = .F. + fill = .T. + dwind_2d = .F. + print_freq = 6 + warm_start = .F. + no_dycore = .false. + z_tracer = .T. + read_increment = .F. + res_latlon_dynamics = "fv3_increment.nc" + + do_schmidt = .true. 
+ target_lat = + target_lon = + stretch_fac = +! nord_zs_filter = 4 + n_zs_filter = 0 + regional = .true. + bc_update_interval = + + full_zs_filter = .F. !unreleased feature + + nord_zs_filter = 4 + n_zs_filter = 0 ! safety +/ + +&surf_map_nml + zero_ocean = .F. + cd4 = 0.12 + cd2 = -1 + n_del2_strong = 0 + n_del2_weak = 2 + n_del4 = 1 + max_slope = 0.4 + peak_fac = 1. +/ + +&external_ic_nml + filtered_terrain = .true. + levp = 65 + gfs_dwinds = .true. + checker_tr = .F. + nt_checker = 0 +/ + +&gfs_physics_nml + fhzero = 1. + h2o_phys = .true. + ldiag3d = .false. + fhcyc = 0. + nst_anl = .true. + use_ufo = .true. + pre_rad = .false. + ncld = 5 + imp_physics = 8 +! ttendlim = 0.005 + ttendlim = 50.0 + ltaerosol = .T. + lradar = .T. + pdfcld = .false. + fhswr = 1200. + fhlwr = 1200. + ialb = 1 + iems = 1 + iaer = 111 + ico2 = 2 + isubc_sw = 2 + isubc_lw = 2 + isol = 2 + lwhtr = .true. + swhtr = .true. + cnvgwd = .false. + shal_cnv = .false. + cal_pre = .false. + redrag = .true. + dspheat = .true. + hybedmf = .false. + satmedmf = .false. + lheatstrg = .F. + do_mynnedmf = .true. + do_mynnsfclay = .false. + random_clds = .false. + trans_trac = .true. + cnvcld = .false. + imfshalcnv = 0 + imfdeepcnv = 0 + cdmbgwd = 3.5,0.25 + prslrd0 = 0. + ivegsrc = 1 + isot = 1 + debug = .false. + oz_phys = .false. + oz_phys_2015 = .true. + nstf_name = 2,1,1,0,5 + cplflx = .F. + iau_delthrs = 6 + iaufhrs = 30 + iau_inc_files = '' + do_sppt = .F. + do_shum = .F. + do_skeb = .F. + do_sfcperts = .F. + lsm = 3 + lsoil = + lsoil_lsm = 9 + iopt_dveg = 2 + iopt_crs = 1 + iopt_btr = 1 + iopt_run = 1 + iopt_sfc = 1 + iopt_frz = 1 + iopt_inf = 1 + iopt_rad = 1 + iopt_alb = 2 + iopt_snf = 4 + iopt_tbot = 2 + iopt_stc = 1 + icloud_bl = 1 + bl_mynn_tkeadvect = .true. 
+ bl_mynn_edmf = 1 + bl_mynn_edmf_mom = 1 +/ + +&cires_ugwp_nml + knob_ugwp_solver = 2 + knob_ugwp_source = 1,1,0,0 + knob_ugwp_wvspec = 1,25,25,25 + knob_ugwp_azdir = 2,4,4,4 + knob_ugwp_stoch = 0,0,0,0 + knob_ugwp_effac = 1,1,1,1 + knob_ugwp_doaxyz = 1 + knob_ugwp_doheat = 1 + knob_ugwp_dokdis = 1 + knob_ugwp_ndx4lh = 1 + knob_ugwp_version = 0 + launch_level = 25 +/ + +&interpolator_nml + interp_method = 'conserve_great_circle' +/ + +&namsfc + FNGLAC = "global_glacier.2x2.grb", + FNMXIC = "global_maxice.2x2.grb", + FNTSFC = "RTGSST.1982.2012.monthly.clim.grb", + FNSNOC = "global_snoclim.1.875.grb", + FNZORC = "igbp", + FNALBC = "global_snowfree_albedo.bosu.t126.384.190.rg.grb", + FNALBC2 = "global_albedo4.1x1.grb", + FNAISC = "CFSR.SEAICE.1982.2012.monthly.clim.grb", + FNTG3C = "global_tg3clim.2.6x1.5.grb", + FNVEGC = "global_vegfrac.0.144.decpercent.grb", + FNVETC = "global_vegtype.igbp.t126.384.190.rg.grb", + FNSOTC = "global_soiltype.statsgo.t126.384.190.rg.grb", + FNSMCC = "global_soilmgldas.t126.384.190.grb", + FNMSKH = "seaice_newland.grb", + FNTSFA = "", + FNACNA = "", + FNSNOA = "", + FNVMNC = "global_shdmin.0.144x0.144.grb", + FNVMXC = "global_shdmax.0.144x0.144.grb", + FNSLPC = "global_slope.1x1.grb", + FNABSC = "global_mxsnoalb.uariz.t126.384.190.rg.grb", + LDEBUG =.false., + FSMCL(2) = 99999 + FSMCL(3) = 99999 + FSMCL(4) = 99999 + FTSFS = 90 + FAISS = 99999 + FSNOL = 99999 + FSICL = 99999 + FTSFL = 99999, + FAISL = 99999, + FVETL = 99999, + FSOTL = 99999, + FvmnL = 99999, + FvmxL = 99999, + FSLPL = 99999, + FABSL = 99999, + FSNOS = 99999, + FSICS = 99999, +/ +&nam_stochy + lon_s=768, + lat_s=384, + ntrunc=382, + SKEBNORM=1, + SKEB_NPASS=30, + SKEB_VDOF=5, + SKEB=@[SKEB], + SKEB_TAU=2.16E4, + SKEB_LSCALE=1000.E3, + SHUM=@[SHUM], + SHUM_TAU=21600, + SHUM_LSCALE=500000, + SPPT=@[SPPT], + SPPT_TAU=21600, + SPPT_LSCALE=500000, + SPPT_LOGIT=.TRUE., + SPPT_SFCLIMIT=.TRUE., + ISEED_SHUM=1, + ISEED_SKEB=2, + ISEED_SPPT=3, +/ +&nam_sfcperts + NSFCPERT=6, + 
PERTZ0=-999., + PERTSHC=-999., + PERTZT=-999., + PERTLAI=-999., + PERTVEGF=-999., + PERTALB=-999., + SFC_TAU=21600, + SFC_LSCALE=500000, + ISEED_SFC=0, + SPPT_LAND=.FALSE., +/ diff --git a/ush/templates/input.nml.FV3_GSD_v0 b/ush/templates/input.nml.FV3_GSD_v0 new file mode 100644 index 0000000000..4ba9a2c5c4 --- /dev/null +++ b/ush/templates/input.nml.FV3_GSD_v0 @@ -0,0 +1,302 @@ +&amip_interp_nml + interp_oi_sst = .true. + use_ncep_sst = .true. + use_ncep_ice = .false. + no_anom_sst = .false. + data_set = 'reynolds_oi', + date_out_of_range = 'climo', +/ + +&atmos_model_nml + blocksize = + chksum_debug = .false. + dycore_only = .false. + fdiag = 1 + ccpp_suite = +/ + +&diag_manager_nml + prepend_date = .F. +/ + +&fms_io_nml + checksum_required = .false. + max_files_r = 100, + max_files_w = 100, +/ + +&fms_nml + clock_grain = 'ROUTINE', + domains_stack_size = 3000000, + print_memory_usage = .false. +/ + +&fv_grid_nml + grid_file = 'INPUT/grid_spec.nc' +/ + +&fv_core_nml + layout = , + io_layout = 1,1 + npx = + npy = + ntiles = 1, + npz = 64 + !grid_type = -1 + make_nh = .T. + fv_debug = .T. + range_warn = .T. + reset_eta = .F. + n_sponge = 24 + nudge_qv = .F. + tau = 5. + rf_cutoff = 20.e2 + d2_bg_k1 = 0.20 + d2_bg_k2 = 0.04 + kord_tm = -11 + kord_mt = 11 + kord_wz = 11 + kord_tr = 11 + hydrostatic = .F. + phys_hydrostatic = .F. + use_hydro_pressure = .F. + beta = 0. + a_imp = 1. + p_fac = 0.1 + k_split = 4 + n_split = 5 + nwat = 6 + na_init = 1 + d_ext = 0.0 + dnats = 0 + fv_sg_adj = 300 + d2_bg = 0. + nord = 2 + dddmp = 0.1 + d4_bg = 0.12 + vtdm4 = 0.02 + ke_bg = 0. + do_vort_damp = .true. + external_ic = .T. + external_eta = .T. + gfs_phil = .false. + nggps_ic = .T. + mountain = .F. + ncep_ic = .F. + d_con = 1.0 + delt_max = 0.002 + hord_mt = 6 + hord_vt = 6 + hord_tm = 6 + hord_dp = -6 + hord_tr = 8 + adjust_dry_mass = .F. + consv_te = 0. + do_sat_adj = .F. + consv_am = .F. + fill = .T. + dwind_2d = .F. + print_freq = 6 + warm_start = .F. + no_dycore = .false. 
+ z_tracer = .T. + read_increment = .F. + res_latlon_dynamics = "fv3_increment.nc" + + do_schmidt = .true. + target_lat = + target_lon = + stretch_fac = +! nord_zs_filter = 4 + n_zs_filter = 0 + regional = .true. + bc_update_interval = + + full_zs_filter = .F. !unreleased feature + + nord_zs_filter = 4 + n_zs_filter = 0 ! safety +/ + +&surf_map_nml + zero_ocean = .F. + cd4 = 0.12 + cd2 = -1 + n_del2_strong = 0 + n_del2_weak = 2 + n_del4 = 1 + max_slope = 0.4 + peak_fac = 1. +/ + +&external_ic_nml + filtered_terrain = .true. + levp = 65 + gfs_dwinds = .true. + checker_tr = .F. + nt_checker = 0 +/ + +&gfs_physics_nml + fhzero = 1. + h2o_phys = .true. + ldiag3d = .false. + fhcyc = 0. + nst_anl = .true. + use_ufo = .true. + pre_rad = .false. + ncld = 5 + imp_physics = 8 +! ttendlim = 0.005 + ttendlim = 50.0 + ltaerosol = .T. + lradar = .T. + pdfcld = .false. + fhswr = 1200. + fhlwr = 1200. + ialb = 1 + iems = 1 + iaer = 111 + ico2 = 2 + isubc_sw = 2 + isubc_lw = 2 + isol = 2 + lwhtr = .true. + swhtr = .true. + cnvgwd = .true. + shal_cnv = .true. + cal_pre = .false. + redrag = .true. + dspheat = .true. + hybedmf = .false. + satmedmf = .false. + lheatstrg = .F. + do_mynnedmf = .true. + do_mynnsfclay = .false. + random_clds = .false. + trans_trac = .true. + cnvcld = .true. + imfshalcnv = 3 + imfdeepcnv = 3 + cdmbgwd = 3.5,0.25 + prslrd0 = 0. + ivegsrc = 1 + isot = 1 + debug = .false. + oz_phys = .false. + oz_phys_2015 = .true. + nstf_name = 2,1,1,0,5 + cplflx = .F. + iau_delthrs = 6 + iaufhrs = 30 + iau_inc_files = '' + do_sppt = .F. + do_shum = .F. + do_skeb = .F. + do_sfcperts = .F. + lsm = 3 + lsoil = + lsoil_lsm = 9 + icloud_bl = 1 + bl_mynn_tkeadvect = .true. 
+ bl_mynn_edmf = 1 + bl_mynn_edmf_mom = 1 +/ + +&cires_ugwp_nml + knob_ugwp_solver = 2 + knob_ugwp_source = 1,1,0,0 + knob_ugwp_wvspec = 1,25,25,25 + knob_ugwp_azdir = 2,4,4,4 + knob_ugwp_stoch = 0,0,0,0 + knob_ugwp_effac = 1,1,1,1 + knob_ugwp_doaxyz = 1 + knob_ugwp_doheat = 1 + knob_ugwp_dokdis = 1 + knob_ugwp_ndx4lh = 1 + knob_ugwp_version = 0 + launch_level = 25 +/ + +&interpolator_nml + interp_method = 'conserve_great_circle' +/ + +&namsfc + FNGLAC = "global_glacier.2x2.grb", + FNMXIC = "global_maxice.2x2.grb", + FNTSFC = "RTGSST.1982.2012.monthly.clim.grb", + FNSNOC = "global_snoclim.1.875.grb", + FNZORC = "igbp", + FNALBC = "global_snowfree_albedo.bosu.t126.384.190.rg.grb", + FNALBC2 = "global_albedo4.1x1.grb", + FNAISC = "CFSR.SEAICE.1982.2012.monthly.clim.grb", + FNTG3C = "global_tg3clim.2.6x1.5.grb", + FNVEGC = "global_vegfrac.0.144.decpercent.grb", + FNVETC = "global_vegtype.igbp.t126.384.190.rg.grb", + FNSOTC = "global_soiltype.statsgo.t126.384.190.rg.grb", + FNSMCC = "global_soilmgldas.t126.384.190.grb", + FNMSKH = "seaice_newland.grb", + FNTSFA = "", + FNACNA = "", + FNSNOA = "", + FNVMNC = "global_shdmin.0.144x0.144.grb", + FNVMXC = "global_shdmax.0.144x0.144.grb", + FNSLPC = "global_slope.1x1.grb", + FNABSC = "global_mxsnoalb.uariz.t126.384.190.rg.grb", + LDEBUG =.false., + FSMCL(2) = 99999 + FSMCL(3) = 99999 + FSMCL(4) = 99999 + FTSFS = 90 + FAISS = 99999 + FSNOL = 99999 + FSICL = 99999 + FTSFL = 99999, + FAISL = 99999, + FVETL = 99999, + FSOTL = 99999, + FvmnL = 99999, + FvmxL = 99999, + FSLPL = 99999, + FABSL = 99999, + FSNOS = 99999, + FSICS = 99999, +/ + +&nam_stochy + lon_s=768, + lat_s=384, + ntrunc=382, + SKEBNORM=1, + SKEB_NPASS=30, + SKEB_VDOF=5, + SKEB=@[SKEB], + SKEB_TAU=2.16E4, + SKEB_LSCALE=1000.E3, + SHUM=@[SHUM], + SHUM_TAU=21600, + SHUM_LSCALE=500000, + SPPT=@[SPPT], + SPPT_TAU=21600, + SPPT_LSCALE=500000, + SPPT_LOGIT=.TRUE., + SPPT_SFCLIMIT=.TRUE., + ISEED_SHUM=1, + ISEED_SKEB=2, + ISEED_SPPT=3, +/ + +&nam_sfcperts + NSFCPERT=6, + 
PERTZ0=-999., + PERTSHC=-999., + PERTZT=-999., + PERTLAI=-999., + PERTVEGF=-999., + PERTALB=-999., + SFC_TAU=21600, + SFC_LSCALE=500000, + ISEED_SFC=0, + SPPT_LAND=.FALSE., +/ diff --git a/ush/templates/input_ccpp_gfsextern_gsdphys.nml b/ush/templates/input_ccpp_gfsextern_gsdphys.nml deleted file mode 100644 index 1a55e0736a..0000000000 --- a/ush/templates/input_ccpp_gfsextern_gsdphys.nml +++ /dev/null @@ -1,255 +0,0 @@ - &amip_interp_nml - interp_oi_sst = .true. - use_ncep_sst = .true. - use_ncep_ice = .false. - no_anom_sst = .false. - data_set = 'reynolds_oi', - date_out_of_range = 'climo', -/ - - &atmos_model_nml - blocksize = - chksum_debug = .false. - dycore_only = .false. - fdiag = 1 - ccpp_suite = 'FV3_GSD_v0' -/ - -&diag_manager_nml - prepend_date = .F. -/ - - &fms_io_nml - checksum_required = .false. - max_files_r = 100, - max_files_w = 100, -/ - - &fms_nml - clock_grain = 'ROUTINE', - domains_stack_size = 3000000, - print_memory_usage = .false. -/ - - &fv_grid_nml - grid_file = 'INPUT/grid_spec.nc' -/ - - &fv_core_nml - layout = , - io_layout = 1,1 - npx = - npy = - ntiles = 1, - npz = 64 - !grid_type = -1 - make_nh = .T. - fv_debug = .T. - range_warn = .T. - reset_eta = .F. - n_sponge = 24 - nudge_qv = .F. - tau = 5. - rf_cutoff = 20.e2 - d2_bg_k1 = 0.20 - d2_bg_k2 = 0.04 - kord_tm = -11 - kord_mt = 11 - kord_wz = 11 - kord_tr = 11 - hydrostatic = .F. - phys_hydrostatic = .F. - use_hydro_pressure = .F. - beta = 0. - a_imp = 1. - p_fac = 0.1 - k_split = 4 - n_split = 5 - nwat = 6 - na_init = 1 - d_ext = 0.0 - dnats = 0 - fv_sg_adj = 300 - d2_bg = 0. - nord = 2 - dddmp = 0.1 - d4_bg = 0.12 - vtdm4 = 0.02 - ke_bg = 0. - do_vort_damp = .true. - external_ic = .T. - external_eta = .T. - gfs_phil = .false. - nggps_ic = .T. - mountain = .F. - ncep_ic = .F. - d_con = 1.0 - delt_max = 0.002 - hord_mt = 6 - hord_vt = 6 - hord_tm = 6 - hord_dp = -6 - hord_tr = 8 - adjust_dry_mass = .F. - consv_te = 0. - do_sat_adj = .F. - consv_am = .F. - fill = .T. - dwind_2d = .F. 
- print_freq = 6 - warm_start = .F. - no_dycore = .false. - z_tracer = .T. - read_increment = .F. - res_latlon_dynamics = "fv3_increment.nc" - - do_schmidt = .true. - target_lat = - target_lon = - stretch_fac = -!! nord_zs_filter = 4 - n_zs_filter = 0 - regional = .true. - bc_update_interval = - - full_zs_filter = .F. !unreleased feature - - nord_zs_filter = 4 - n_zs_filter = 0 ! safety -/ - -&surf_map_nml - zero_ocean = .F. - cd4 = 0.12 - cd2 = -1 - n_del2_strong = 0 - n_del2_weak = 2 - n_del4 = 1 - max_slope = 0.4 - peak_fac = 1. -/ - - &external_ic_nml - filtered_terrain = .true. - levp = 65 - gfs_dwinds = .true. - checker_tr = .F. - nt_checker = 0 -/ - - &gfs_physics_nml - fhzero = 1. - h2o_phys = .true. - ldiag3d = .false. - fhcyc = 0. - nst_anl = .true. - use_ufo = .true. - pre_rad = .false. - ncld = 5 - imp_physics = 8 - ttendlim = 0.005 - !ttendlim = 0.008 - ltaerosol = .T. - lradar = .T. - pdfcld = .false. - fhswr = 3600. - fhlwr = 3600. - ialb = 1 - iems = 1 - iaer = 111 - ico2 = 2 - isubc_sw = 2 - isubc_lw = 2 - isol = 2 - lwhtr = .true. - swhtr = .true. - cnvgwd = .true. - shal_cnv = .true. - cal_pre = .false. - redrag = .true. - dspheat = .true. - hybedmf = .false. - satmedmf = .false. - lheatstrg = .F. - do_mynnedmf = .true. - do_mynnsfclay = .false. - random_clds = .false. - trans_trac = .true. - cnvcld = .true. - imfshalcnv = 3 - imfdeepcnv = 3 - cdmbgwd = 3.5,0.25 - prslrd0 = 0. - ivegsrc = 1 - isot = 1 - debug = .false. - oz_phys = .false. - oz_phys_2015 = .true. - nstf_name = 2,1,1,0,5 - cplflx = .F. - iau_delthrs = 6 - iaufhrs = 30 - iau_inc_files = '' - do_sppt = .F. - do_shum = .F. - do_skeb = .F. - do_sfcperts = .F. - lsm = 3 - lsoil = 4 - lsoil_lsm = 9 - icloud_bl = 1 - bl_mynn_tkeadvect = .true. 
- bl_mynn_edmf = 1 - bl_mynn_edmf_mom = 1 -/ - - &interpolator_nml - interp_method = 'conserve_great_circle' -/ - -&namsfc - FNGLAC = "global_glacier.2x2.grb", - FNMXIC = "global_maxice.2x2.grb", - FNTSFC = "RTGSST.1982.2012.monthly.clim.grb", - FNSNOC = "global_snoclim.1.875.grb", - FNZORC = "igbp" -! FNZORC = "global_zorclim.1x1.grb", - FNALBC = "global_snowfree_albedo.bosu.t126.384.190.rg.grb", - FNALBC2 = "global_albedo4.1x1.grb", - FNAISC = "CFSR.SEAICE.1982.2012.monthly.clim.grb", - FNTG3C = "global_tg3clim.2.6x1.5.grb", - FNVEGC = "global_vegfrac.0.144.decpercent.grb", - FNVETC = "global_vegtype.igbp.t126.384.190.rg.grb", - FNSOTC = "global_soiltype.statsgo.t126.384.190.rg.grb", - FNSMCC = "global_soilmgldas.t126.384.190.grb", - FNMSKH = "seaice_newland.grb", - FNTSFA = "", - FNACNA = "", - FNSNOA = "", - FNVMNC = "global_shdmin.0.144x0.144.grb", - FNVMXC = "global_shdmax.0.144x0.144.grb", - FNSLPC = "global_slope.1x1.grb", - FNABSC = "global_mxsnoalb.uariz.t126.384.190.rg.grb", - LDEBUG =.false., - FSMCL(2) = 99999 - FSMCL(3) = 99999 - FSMCL(4) = 99999 - FTSFS = 90 - FAISS = 99999 - FSNOL = 99999 - FSICL = 99999 - FTSFL = 99999, - FAISL = 99999, - FVETL = 99999, - FSOTL = 99999, - FvmnL = 99999, - FvmxL = 99999, - FSLPL = 99999, - FABSL = 99999, - FSNOS = 99999, - FSICS = 99999, -/ -&nam_stochy -/ -&nam_sfcperts -/ diff --git a/ush/templates/input_ccpp_raphrrrextern_gsdphys.nml b/ush/templates/input_ccpp_raphrrrextern_gsdphys.nml deleted file mode 100644 index 6e5794acd7..0000000000 --- a/ush/templates/input_ccpp_raphrrrextern_gsdphys.nml +++ /dev/null @@ -1,255 +0,0 @@ - &amip_interp_nml - interp_oi_sst = .true. - use_ncep_sst = .true. - use_ncep_ice = .false. - no_anom_sst = .false. - data_set = 'reynolds_oi', - date_out_of_range = 'climo', -/ - - &atmos_model_nml - blocksize = - chksum_debug = .false. - dycore_only = .false. - fdiag = 1 - ccpp_suite = 'FV3_GSD_v0' -/ - -&diag_manager_nml - prepend_date = .F. 
-/ - - &fms_io_nml - checksum_required = .false. - max_files_r = 100, - max_files_w = 100, -/ - - &fms_nml - clock_grain = 'ROUTINE', - domains_stack_size = 3000000, - print_memory_usage = .false. -/ - - &fv_grid_nml - grid_file = 'INPUT/grid_spec.nc' -/ - - &fv_core_nml - layout = , - io_layout = 1,1 - npx = - npy = - ntiles = 1, - npz = 64 - !grid_type = -1 - make_nh = .T. - fv_debug = .T. - range_warn = .T. - reset_eta = .F. - n_sponge = 24 - nudge_qv = .F. - tau = 5. - rf_cutoff = 20.e2 - d2_bg_k1 = 0.20 - d2_bg_k2 = 0.04 - kord_tm = -11 - kord_mt = 11 - kord_wz = 11 - kord_tr = 11 - hydrostatic = .F. - phys_hydrostatic = .F. - use_hydro_pressure = .F. - beta = 0. - a_imp = 1. - p_fac = 0.1 - k_split = 4 - n_split = 5 - nwat = 6 - na_init = 1 - d_ext = 0.0 - dnats = 0 - fv_sg_adj = 300 - d2_bg = 0. - nord = 2 - dddmp = 0.1 - d4_bg = 0.12 - vtdm4 = 0.02 - ke_bg = 0. - do_vort_damp = .true. - external_ic = .T. - external_eta = .T. - gfs_phil = .false. - nggps_ic = .T. - mountain = .F. - ncep_ic = .F. - d_con = 1.0 - delt_max = 0.002 - hord_mt = 6 - hord_vt = 6 - hord_tm = 6 - hord_dp = -6 - hord_tr = 8 - adjust_dry_mass = .F. - consv_te = 0. - do_sat_adj = .F. - consv_am = .F. - fill = .T. - dwind_2d = .F. - print_freq = 6 - warm_start = .F. - no_dycore = .false. - z_tracer = .T. - read_increment = .F. - res_latlon_dynamics = "fv3_increment.nc" - - do_schmidt = .true. - target_lat = - target_lon = - stretch_fac = -!! nord_zs_filter = 4 - n_zs_filter = 0 - regional = .true. - bc_update_interval = - - full_zs_filter = .F. !unreleased feature - - nord_zs_filter = 4 - n_zs_filter = 0 ! safety -/ - -&surf_map_nml - zero_ocean = .F. - cd4 = 0.12 - cd2 = -1 - n_del2_strong = 0 - n_del2_weak = 2 - n_del4 = 1 - max_slope = 0.4 - peak_fac = 1. -/ - - &external_ic_nml - filtered_terrain = .true. - levp = 65 - gfs_dwinds = .true. - checker_tr = .F. - nt_checker = 0 -/ - - &gfs_physics_nml - fhzero = 1. - h2o_phys = .true. - ldiag3d = .false. - fhcyc = 0. - nst_anl = .true. 
- use_ufo = .true. - pre_rad = .false. - ncld = 5 - imp_physics = 8 - ttendlim = 0.005 - !ttendlim = 0.008 - ltaerosol = .T. - lradar = .T. - pdfcld = .false. - fhswr = 3600. - fhlwr = 3600. - ialb = 1 - iems = 1 - iaer = 111 - ico2 = 2 - isubc_sw = 2 - isubc_lw = 2 - isol = 2 - lwhtr = .true. - swhtr = .true. - cnvgwd = .true. - shal_cnv = .true. - cal_pre = .false. - redrag = .true. - dspheat = .true. - hybedmf = .false. - satmedmf = .false. - lheatstrg = .F. - do_mynnedmf = .true. - do_mynnsfclay = .false. - random_clds = .false. - trans_trac = .true. - cnvcld = .true. - imfshalcnv = 3 - imfdeepcnv = 3 - cdmbgwd = 3.5,0.25 - prslrd0 = 0. - ivegsrc = 1 - isot = 1 - debug = .false. - oz_phys = .false. - oz_phys_2015 = .true. - nstf_name = 2,1,1,0,5 - cplflx = .F. - iau_delthrs = 6 - iaufhrs = 30 - iau_inc_files = '' - do_sppt = .F. - do_shum = .F. - do_skeb = .F. - do_sfcperts = .F. - lsm = 3 - lsoil = 9 - lsoil_lsm = 9 - icloud_bl = 1 - bl_mynn_tkeadvect = .true. - bl_mynn_edmf = 1 - bl_mynn_edmf_mom = 1 -/ - - &interpolator_nml - interp_method = 'conserve_great_circle' -/ - -&namsfc - FNGLAC = "global_glacier.2x2.grb", - FNMXIC = "global_maxice.2x2.grb", - FNTSFC = "RTGSST.1982.2012.monthly.clim.grb", - FNSNOC = "global_snoclim.1.875.grb", - FNZORC = "igbp" -! 
FNZORC = "global_zorclim.1x1.grb", - FNALBC = "global_snowfree_albedo.bosu.t126.384.190.rg.grb", - FNALBC2 = "global_albedo4.1x1.grb", - FNAISC = "CFSR.SEAICE.1982.2012.monthly.clim.grb", - FNTG3C = "global_tg3clim.2.6x1.5.grb", - FNVEGC = "global_vegfrac.0.144.decpercent.grb", - FNVETC = "global_vegtype.igbp.t126.384.190.rg.grb", - FNSOTC = "global_soiltype.statsgo.t126.384.190.rg.grb", - FNSMCC = "global_soilmgldas.t126.384.190.grb", - FNMSKH = "seaice_newland.grb", - FNTSFA = "", - FNACNA = "", - FNSNOA = "", - FNVMNC = "global_shdmin.0.144x0.144.grb", - FNVMXC = "global_shdmax.0.144x0.144.grb", - FNSLPC = "global_slope.1x1.grb", - FNABSC = "global_mxsnoalb.uariz.t126.384.190.rg.grb", - LDEBUG =.false., - FSMCL(2) = 99999 - FSMCL(3) = 99999 - FSMCL(4) = 99999 - FTSFS = 90 - FAISS = 99999 - FSNOL = 99999 - FSICL = 99999 - FTSFL = 99999, - FAISL = 99999, - FVETL = 99999, - FSOTL = 99999, - FvmnL = 99999, - FvmxL = 99999, - FSLPL = 99999, - FABSL = 99999, - FSNOS = 99999, - FSICS = 99999, -/ -&nam_stochy -/ -&nam_sfcperts -/ diff --git a/ush/templates/model_configure.FV3_GFS_2017_gfdlmp b/ush/templates/model_configure.FV3_GFS_2017_gfdlmp new file mode 100644 index 0000000000..de21856353 --- /dev/null +++ b/ush/templates/model_configure.FV3_GFS_2017_gfdlmp @@ -0,0 +1,23 @@ +total_member: 1 +PE_MEMBER01: +start_year: +start_month: +start_day: +start_hour: +start_minute: 0 +start_second: 0 +nhours_fcst: +RUN_CONTINUE: .false. +ENS_SPS: .false. +dt_atmos: +cpl: .false. +calendar: 'julian' +memuse_verbose: .false. +atmos_nthreads: 2 +use_hyper_thread: .false. +ncores_per_node: +debug_affinity: .true. +restart_interval: 0 +output_1st_tstep_rst: .false. 
+quilting: +print_esmf: diff --git a/ush/templates/model_configure.FV3_GSD_SAR b/ush/templates/model_configure.FV3_GSD_SAR new file mode 100644 index 0000000000..de21856353 --- /dev/null +++ b/ush/templates/model_configure.FV3_GSD_SAR @@ -0,0 +1,23 @@ +total_member: 1 +PE_MEMBER01: +start_year: +start_month: +start_day: +start_hour: +start_minute: 0 +start_second: 0 +nhours_fcst: +RUN_CONTINUE: .false. +ENS_SPS: .false. +dt_atmos: +cpl: .false. +calendar: 'julian' +memuse_verbose: .false. +atmos_nthreads: 2 +use_hyper_thread: .false. +ncores_per_node: +debug_affinity: .true. +restart_interval: 0 +output_1st_tstep_rst: .false. +quilting: +print_esmf: diff --git a/ush/templates/model_configure.FV3_GSD_v0 b/ush/templates/model_configure.FV3_GSD_v0 new file mode 100644 index 0000000000..de21856353 --- /dev/null +++ b/ush/templates/model_configure.FV3_GSD_v0 @@ -0,0 +1,23 @@ +total_member: 1 +PE_MEMBER01: +start_year: +start_month: +start_day: +start_hour: +start_minute: 0 +start_second: 0 +nhours_fcst: +RUN_CONTINUE: .false. +ENS_SPS: .false. +dt_atmos: +cpl: .false. +calendar: 'julian' +memuse_verbose: .false. +atmos_nthreads: 2 +use_hyper_thread: .false. +ncores_per_node: +debug_affinity: .true. +restart_interval: 0 +output_1st_tstep_rst: .false. +quilting: +print_esmf: diff --git a/ush/templates/wrtcmp_lambert_conformal b/ush/templates/wrtcmp_lambert_conformal index e6058f96c9..ac664e0616 100644 --- a/ush/templates/wrtcmp_lambert_conformal +++ b/ush/templates/wrtcmp_lambert_conformal @@ -7,17 +7,17 @@ output_file: 'netcdf' write_nemsioflip: .false. write_fsyncflag: .false. -output_grid: # Coordinate system of output grid. -cen_lon: # central longitude -cen_lat: # central latitude -stdlat1: -stdlat2: -nx: # Number of points along x-axis. -ny: # Number of points along y-axis. -lon1: -lat1: -dx: # x-direction grid cell size. -dy: # y-direction grid cell size. +output_grid: # Coordinate system used by output grid. +cen_lon: # Reference longitude, in degrees. 
+cen_lat: # Reference latitude, in degrees. +stdlat1: # Latitude of first standard parallel, in degrees. +stdlat2: # Latitude of second standard parallel, in degrees. +nx: # Number of points along x-axis in Lambert conformal (x,y) plane. +ny: # Number of points along y-axis in Lambert conformal (x,y) plane. +lon1: # Longitude of grid point at lower-left corner of grid, in degrees. +lat1: # Latitude of grid point at lower-left corner of grid, in degrees. +dx: # Grid cell size in x direction, in meters. +dy: # Grid cell size in y direction, in meters. nfhout: 1 nfhmax_hf: 60 diff --git a/ush/templates/wrtcmp_regional_latlon b/ush/templates/wrtcmp_regional_latlon new file mode 100644 index 0000000000..d7122feac0 --- /dev/null +++ b/ush/templates/wrtcmp_regional_latlon @@ -0,0 +1,23 @@ + +write_groups: +write_tasks_per_group: +num_files: 2 +filename_base: 'dyn''phy' +output_file: 'netcdf' +write_nemsioflip: .false. +write_fsyncflag: .false. + +output_grid: # Coordinate system of output grid. +cen_lon: # central longitude +cen_lat: # central latitude +lon1: # longitude of lower-left point in non-rotated coordinate system (in degrees) +lat1: # latitude of lower-left +lon2: # longitude of upper-right +lat2: # latitude of upper-right +dlon: +dlat: + +nfhout: 1 +nfhmax_hf: 60 +nfhout_hf: 1 +nsout: -1 diff --git a/ush/templates/wrtcmp_rotated_latlon b/ush/templates/wrtcmp_rotated_latlon index d7122feac0..28cad6460f 100644 --- a/ush/templates/wrtcmp_rotated_latlon +++ b/ush/templates/wrtcmp_rotated_latlon @@ -8,12 +8,12 @@ write_nemsioflip: .false. write_fsyncflag: .false. output_grid: # Coordinate system of output grid. -cen_lon: # central longitude -cen_lat: # central latitude -lon1: # longitude of lower-left point in non-rotated coordinate system (in degrees) -lat1: # latitude of lower-left -lon2: # longitude of upper-right -lat2: # latitude of upper-right +cen_lon: # Longitude of center of grid, expressed in the NON-ROTATED latlon coordinate system (in degrees). 
This is also the longitude of the point at which the equator and prime meridian of the ROTATED coordinate system intersect (i.e. the point at which the longitude and latitude in the ROTATED latlon coordinate system are both 0). +cen_lat: # Latitude of center of grid, expressed in the NON-ROTATED latlon coordinate system (in degrees). This is also the latitude of the point at which the equator and prime meridian of the ROTATED coordinate system intersect (i.e. the point at which the longitude and latitude in the ROTATED latlon coordinate system are both 0). +lon1: # Longitude of lower-left grid point, expressed in the ROTATED latlon coordinate system (in degrees). +lat1: # Latitude of lower-left grid point, expressed in the ROTATED latlon coordinate system (in degrees). +lon2: # Longitude of upper-right grid point, expressed in the ROTATED latlon coordinate system (in degrees). +lat2: # Latitude of upper-right grid point, expressed in the ROTATED latlon coordinate system (in degrees). dlon: dlat: diff --git a/ush/valid_param_vals.sh b/ush/valid_param_vals.sh index 5b29e8e57c..129d48ee11 100644 --- a/ush/valid_param_vals.sh +++ b/ush/valid_param_vals.sh @@ -1,18 +1,27 @@ valid_vals_RUN_ENVIR=("nco" "community") valid_vals_VERBOSE=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_MACHINE=("WCOSS_C" "WCOSS" "DELL" "THEIA" "HERA" "JET" "ODIN" "CHEYENNE") -valid_vals_PREDEF_GRID_NAME=("GSD_HAFSV0.A3km" "GSD_HAFSV0.A13km" "GSD_HAFSV0.A25km" "GSD_RAP13km" "GSD_HRRR25km" "GSD_HRRR13km" "GSD_HRRR3km" "EMC_CONUS" "EMC_AK") +valid_vals_PREDEF_GRID_NAME=( \ +"EMC_CONUS_3km" "EMC_CONUS_coarse" "EMC_AK" \ +"GSD_HAFSV0.A3km" "GSD_HAFSV0.A13km" "GSD_HAFSV0.A25km" \ +"GSD_HRRR_AK_3km" "GSD_HRRR_AK_50km" \ +"GSD_HRRR3km" "GSD_HRRR13km" "GSD_HRRR25km" \ +"GSD_RAP13km" ) +valid_vals_EMC_GRID_NAME=("ak" "conus" "conus_c96" "conus_orig" "guam" "hi" "pr") valid_vals_USE_CCPP=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") -valid_vals_CCPP_PHYS_SUITE=("GFS" "GSD") 
-valid_vals_RES=("48" "96" "192" "384" "768" "1152" "3072") +valid_vals_CCPP_PHYS_SUITE=("FV3_GFS_2017_gfdlmp" "FV3_GSD_v0" "FV3_GSD_SAR") +valid_vals_GFDLgrid_RES=("48" "96" "192" "384" "768" "1152" "3072") valid_vals_EXTRN_MDL_NAME_ICS=("GSMGFS" "FV3GFS" "RAPX" "HRRRX") valid_vals_EXTRN_MDL_NAME_LBCS=("GSMGFS" "FV3GFS" "RAPX" "HRRRX") -valid_vals_FV3GFS_DATA_TYPE=("nemsio" "grib2") +valid_vals_FV3GFS_FILE_FMT_ICS=("nemsio" "grib2") +valid_vals_FV3GFS_FILE_FMT_LBCS=("nemsio" "grib2") valid_vals_GRID_GEN_METHOD=("GFDLgrid" "JPgrid") valid_vals_PREEXISTING_DIR_METHOD=("delete" "rename" "quit") -valid_vals_gtype=("nest" "regional") -valid_vals_WRTCMP_output_grid=("rotated_latlon" "lambert_conformal") +valid_vals_GTYPE=("regional") +valid_vals_WRTCMP_output_grid=("rotated_latlon" "lambert_conformal" "regional_latlon") valid_vals_RUN_TASK_MAKE_GRID=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_RUN_TASK_MAKE_OROG=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_RUN_TASK_MAKE_SFC_CLIMO=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_QUILTING=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") +valid_vals_PRINT_ESMF=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") +valid_vals_USE_CRON_TO_RELAUNCH=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no")