From 6501083eb4cd68e01cd5390d900a831a234a584d Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Tue, 5 Apr 2022 13:22:26 -0500 Subject: [PATCH 01/33] Change Orion python version to 3.7.5 - update versions/orion.ver to change python version to 3.7.5 - using python/3.9.2 results in DA job errors Refs: #665 --- versions/orion.ver | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/versions/orion.ver b/versions/orion.ver index fb9052abd6..340d5f0308 100644 --- a/versions/orion.ver +++ b/versions/orion.ver @@ -8,7 +8,7 @@ export prepobs_run_ver=1.0.0 export prod_util_ver=1.2.2 export cmake_ver=3.22.1 export gempak_ver=7.5.1 -export python_ver=3.9.2 +export python_ver=3.7.5 export wrf_io_ver=1.2.0 export esmf_ver=8_0_1 export nco_ver=4.9.3 From 1de089363e23915d21fd9dbb74b71134fad171d7 Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Tue, 5 Apr 2022 13:45:42 -0500 Subject: [PATCH 02/33] Retire WCOSS_C and WCOSS_DELL_P3 support from resource configs - retire WCOSS_C references from config.fv3.emc.dyn and config.resources.emc.dyn - remove WCOSS_DELL_P3 references from config.fv3.emc.dyn and config.resources.emc.dyn; WCOSS_DELL will disappear after WCOSS2 go-live Refs: #665 --- parm/config/config.fv3.emc.dyn | 4 ---- parm/config/config.resources.emc.dyn | 25 ------------------------- 2 files changed, 29 deletions(-) diff --git a/parm/config/config.fv3.emc.dyn b/parm/config/config.fv3.emc.dyn index a666115f57..8ba9e5f7f2 100755 --- a/parm/config/config.fv3.emc.dyn +++ b/parm/config/config.fv3.emc.dyn @@ -22,10 +22,6 @@ echo "BEGIN: config.fv3" if [[ "$machine" = "WCOSS2" ]]; then export npe_node_max=128 -elif [[ "$machine" = "WCOSS_DELL_P3" ]]; then - export npe_node_max=28 -elif [[ "$machine" = "WCOSS_C" ]]; then - export npe_node_max=24 elif [[ "$machine" = "JET" ]]; then export npe_node_max=24 elif [[ "$machine" = "HERA" ]]; then diff --git a/parm/config/config.resources.emc.dyn b/parm/config/config.resources.emc.dyn index 05e3bd3f53..12d0094e5c 100755 
--- a/parm/config/config.resources.emc.dyn +++ b/parm/config/config.resources.emc.dyn @@ -24,13 +24,6 @@ echo "BEGIN: config.resources" if [[ "$machine" = "WCOSS2" ]]; then export npe_node_max=128 -elif [[ "$machine" = "WCOSS_DELL_P3" ]]; then - export npe_node_max=28 - if [ "$QUEUE" = "dev2" -o "$QUEUE" = "devonprod2" -o "$QUEUE" = "devmax2" ]; then # WCOSS Dell 3.5 - export npe_node_max=40 - fi -elif [[ "$machine" = "WCOSS_C" ]]; then - export npe_node_max=24 elif [[ "$machine" = "JET" ]]; then export npe_node_max=24 elif [[ "$machine" = "HERA" ]]; then @@ -140,7 +133,6 @@ elif [ $step = "anal" ]; then export nth_anal=10 fi if [ $CASE = "C192" -o $CASE = "C96" -o $CASE = "C48" ]; then export npe_anal=84; fi - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_anal=7; fi export npe_node_anal=$(echo "$npe_node_max / $nth_anal" | bc) if [[ "$machine" = "WCOSS2" ]]; then export npe_node_anal=15; fi export nth_cycle=$npe_node_max @@ -153,7 +145,6 @@ elif [ $step = "analcalc" ]; then export ntasks=$npe_analcalc export nth_analcalc=1 export npe_node_analcalc=$npe_node_max - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export npe_analcalc=127 ; fi elif [ $step = "analdiag" ]; then @@ -197,7 +188,6 @@ elif [ $step = "post" ]; then export npe_node_post=$npe_post export npe_node_post_gfs=$npe_post export npe_node_dwn=$npe_node_max - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export npe_node_post=28 ; fi elif [ $step = "wafs" ]; then @@ -301,7 +291,6 @@ elif [ $step = "eobs" -o $step = "eomg" ]; then export npe_eobs=14 fi export nth_eobs=3 - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_eobs=7; fi export npe_node_eobs=$(echo "$npe_node_max / $nth_eobs" | bc) if [[ "$machine" = "WCOSS2" ]]; then export npe_node_eobs=40; fi @@ -319,16 +308,9 @@ elif [ $step = "eupd" ]; then if [ $CASE = "C768" ]; then export npe_eupd=315 export nth_eupd=14 - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then - export npe_eupd=960 - export nth_eupd=7 - fi elif [ $CASE = "C384" ]; then 
export npe_eupd=270 export nth_eupd=2 - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then - export nth_eupd=9 - fi if [[ "$machine" = "HERA" ]]; then export npe_eupd=84 export nth_eupd=10 @@ -344,7 +326,6 @@ elif [ $step = "ecen" ]; then export wtime_ecen="00:10:00" export npe_ecen=80 export nth_ecen=4 - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_ecen=7; fi if [ $CASE = "C384" -o $CASE = "C192" -o $CASE = "C96" -o $CASE = "C48" ]; then export nth_ecen=2; fi export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) export nth_cycle=$nth_ecen @@ -372,7 +353,6 @@ elif [ $step = "epos" ]; then export wtime_epos="00:15:00" export npe_epos=80 export nth_epos=4 - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_epos=7; fi export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) elif [ $step = "postsnd" ]; then @@ -396,11 +376,6 @@ elif [ $step = "awips" ]; then export npe_node_awips=1 export nth_awips=1 export memory_awips="1GB" - if [[ "$machine" == "WCOSS_DELL_P3" ]]; then - export npe_awips=2 - export npe_node_awips=2 - export nth_awips=1 - fi elif [ $step = "gempak" ]; then From 8644ea466f5fab2ca5f6b57df3984b2f90491bb9 Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Fri, 8 Apr 2022 12:23:36 -0500 Subject: [PATCH 03/33] Add cdate10 to config.prep - add cdate10 setting to config.prep to support development - use $PDY$cyc to build $cdate10 Refs: #665 --- parm/config/config.prep | 1 + 1 file changed, 1 insertion(+) diff --git a/parm/config/config.prep b/parm/config/config.prep index 8b84b1a0e1..65520cd820 100755 --- a/parm/config/config.prep +++ b/parm/config/config.prep @@ -9,6 +9,7 @@ echo "BEGIN: config.prep" . 
$EXPDIR/config.resources prep export DO_MAKEPREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} # Relocation and syndata QC export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} From f7847f307b9c32885cfc782345cf619049527065 Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Fri, 8 Apr 2022 12:25:01 -0500 Subject: [PATCH 04/33] Change C96 DELTIM to 450 in config.fv3.emc.dyn - change C96 DELTIM to 450 based on testing on Orion - DELTIM=450 also matches current setting in develop branch Refs: #665 --- parm/config/config.fv3.emc.dyn | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parm/config/config.fv3.emc.dyn b/parm/config/config.fv3.emc.dyn index 8ba9e5f7f2..b3e30503f7 100755 --- a/parm/config/config.fv3.emc.dyn +++ b/parm/config/config.fv3.emc.dyn @@ -51,7 +51,7 @@ case $case_in in export WRTIOBUF="1M" ;; "C96") - export DELTIM=720 + export DELTIM=450 export layout_x=6 export layout_y=4 export layout_x_gfs=6 From fa68fd0cabcccaac49b4d1cff1b15d3e6009b9b8 Mon Sep 17 00:00:00 2001 From: "Lin.Gan" Date: Fri, 8 Apr 2022 17:39:47 +0000 Subject: [PATCH 05/33] This commit is the development seed --- .../enkfgdas/analysis/create/jenkfgdas_diag.ecf | 3 +-- .../enkfgdas/analysis/create/jenkfgdas_select_obs.ecf | 5 +++-- .../enkfgdas/analysis/create/jenkfgdas_update.ecf | 3 +-- .../enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf | 4 ++-- .../enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf | 5 +++-- ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf | 10 +++++++--- ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf | 4 ++-- .../gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf | 2 +- ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf | 6 ++++++ .../gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf | 9 +++++++++ .../atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf | 6 ++++++ ecf/scripts/gdas/atmos/post/.gitignore | 1 - ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf | 5 +++++ ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf 
| 5 +++++ ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf | 4 ++++ ecf/scripts/gdas/jgdas_forecast.ecf | 1 + ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf | 1 + .../gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf | 2 +- ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf | 6 ++++++ .../gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf | 7 +++++++ .../atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf | 7 +++++++ .../atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf | 6 ++++++ .../gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf | 6 ++++++ .../atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf | 5 +++++ ecf/scripts/gfs/atmos/post/.gitignore | 1 - ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf | 2 +- .../awips_20km_1p0/jgfs_atmos_awips_master.ecf | 6 ++++++ .../awips_g2/jgfs_atmos_awips_g2_master.ecf | 6 ++++++ .../post_processing/bulletins/jgfs_atmos_fbwind.ecf | 6 ++++++ .../grib2_wafs/jgfs_atmos_wafs_blending.ecf | 6 ++++++ .../grib2_wafs/jgfs_atmos_wafs_blending_0p25.ecf | 6 ++++++ .../grib2_wafs/jgfs_atmos_wafs_grib2.ecf | 6 ++++++ .../grib2_wafs/jgfs_atmos_wafs_grib2_0p25.ecf | 6 ++++++ .../grib_wafs/jgfs_atmos_wafs_master.ecf | 6 ++++++ .../gfs/atmos/post_processing/jgfs_atmos_wafs_gcip.ecf | 6 ++++++ ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf | 4 ++++ ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf | 6 ++++++ ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf | 6 ++++++ ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf | 6 ++++++ ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf | 1 + 40 files changed, 173 insertions(+), 20 deletions(-) diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf index e50e886381..ba73860cdf 100755 --- a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf +++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf @@ -4,8 +4,7 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:06:00 -#PBS -l 
select=1:mpiprocs=48:ompthreads=1:ncpus=48:mem=24GB -#PBS -l place=vscatter +#PBS -l place=vscatter,select=1:mpiprocs=48:ompthreads=1:ncpus=48:mem=24GB #PBS -l debug=true model=gfs diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf index 0f14ee74ab..c0836fd996 100755 --- a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf +++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf @@ -4,8 +4,7 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:10:00 -#PBS -l select=12:mpiprocs=40:ompthreads=3:ncpus=120 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:excl,select=12:mpiprocs=40:ompthreads=3:ncpus=120 #PBS -l debug=true model=gfs @@ -26,6 +25,7 @@ module load craype/${craype_ver} module load intel/${intel_ver} module load cray-mpich/${cray_mpich_ver} module load cray-pals/${cray_pals_ver} +#module load cfp/${cfp_ver} module load python/${python_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} @@ -38,6 +38,7 @@ module list ############################################################# export cyc=%CYC% export cycle=t%CYC%z +#export USE_CFP=YES ############################################################ # CALL executable job script here diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf index a15e18126f..625c31789d 100755 --- a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf +++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf @@ -4,8 +4,7 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:30:00 -#PBS -l select=35:mpiprocs=9:ompthreads=14:ncpus=126 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:excl,select=35:mpiprocs=9:ompthreads=14:ncpus=126 #PBS -l debug=true model=gfs diff --git a/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf 
b/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf index 2a72897cfa..1640290d4d 100755 --- a/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf +++ b/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf @@ -4,8 +4,7 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:10:00 -#PBS -l select=3:mpiprocs=32:ompthreads=4:ncpus=128 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:excl,select=3:mpiprocs=32:ompthreads=4:ncpus=128 #PBS -l debug=true model=gfs @@ -38,6 +37,7 @@ module list export FHRGRP=%FHRGRP% export cyc=%CYC% export cycle=t%CYC%z +#export USE_CFP=YES export FHMIN_ECEN=$FHRGRP export FHMAX_ECEN=$FHRGRP export FHOUT_ECEN=$FHRGRP diff --git a/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf b/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf index de0f437810..c646bd9a37 100755 --- a/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf +++ b/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf @@ -4,8 +4,7 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:06:00 -#PBS -l select=1:mpiprocs=80:ompthreads=1:ncpus=80:mem=40GB -#PBS -l place=vscatter +#PBS -l place=vscatter,select=1:mpiprocs=80:ompthreads=1:ncpus=80:mem=40GB #PBS -l debug=true model=gfs @@ -31,6 +30,7 @@ module load libjpeg/${libjpeg_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} module load grib_util/${grib_util_ver} +#module load wgrib2/${wgrib2_ver} module list @@ -39,6 +39,7 @@ module list ############################################################# export cyc=%CYC% export cycle=t%CYC%z +#export USE_CFP=YES ############################################################ # CALL executable job script here diff --git a/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf b/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf index 2584b572aa..b5bcbc4b17 100755 --- a/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf +++ b/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf @@ -3,9 +3,8 @@ #PBS -j oe 
#PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% -#PBS -l walltime=00:40:00 -#PBS -l select=4:mpiprocs=128:ompthreads=1:ncpus=128 -#PBS -l place=vscatter:excl +#PBS -l walltime=01:20:00 +#PBS -l place=vscatter:excl,select=4:mpiprocs=128:ompthreads=1:ncpus=128 #PBS -l debug=true model=gfs @@ -26,8 +25,12 @@ module load craype/${craype_ver} module load intel/${intel_ver} module load cray-mpich/${cray_mpich_ver} module load cray-pals/${cray_pals_ver} +#module load esmf/${esmf_ver} +#module load cfp/${cfp_ver} +#module load libjpeg/${libjpeg_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} +#module load grib_util/${grib_util_ver} module list @@ -37,6 +40,7 @@ module list export ENSGRP=%ENSGRP% export cyc=%CYC% export cycle=t%CYC%z +#export USE_CFP=YES ############################################################ # CALL executable job script here diff --git a/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf b/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf index 744cad198b..00b901f1ea 100755 --- a/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf +++ b/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf @@ -4,8 +4,7 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:15:00 -#PBS -l select=3:mpiprocs=32:ompthreads=4:ncpus=128 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:excl,select=3:mpiprocs=32:ompthreads=4:ncpus=128 #PBS -l debug=true model=gfs @@ -39,6 +38,7 @@ export FHMAX_EPOS=%FHOUT_EPOS% export FHOUT_EPOS=%FHOUT_EPOS% export cyc=%CYC% export cycle=t%CYC%z +#export USE_CFP=YES ############################################################ # CALL executable job script here diff --git a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf index 1fe1b33005..65ea403cf3 100755 --- a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf +++ b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf @@ -3,7 +3,7 @@ #PBS -j oe #PBS -q %QUEUE% #PBS 
-A %PROJ%-%PROJENVIR% -#PBS -l walltime=00:10:00 +#PBS -l walltime=01:20:00 #PBS -l select=1:mpiprocs=128:ompthreads=1:ncpus=128 #PBS -l place=vscatter:excl #PBS -l debug=true diff --git a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf index b6c9454318..d075bfac44 100755 --- a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf +++ b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf @@ -35,6 +35,12 @@ export cyc=%CYC% export cycle=t%CYC%z export USE_CFP=YES +#### EMC developer only +COMPONENT=atmos +export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf index 61f7f0a17f..340cac8fc7 100755 --- a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf +++ b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf @@ -31,6 +31,15 @@ module list export cyc=%CYC% export cycle=t%CYC%z +#export USE_CFP=YES + +#### EMC developer only +COMPONENT=atmos +export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak} +export COMINgdas=${COMINgdas:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}} +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak/meta} +export COMOUTncdc=${COMOUTncdc:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +#### ############################################################ # CALL executable job script here diff --git a/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf 
b/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf index e0ab513b33..9357af68b1 100755 --- a/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf +++ b/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf @@ -30,9 +30,15 @@ module load wgrib2/${wgrib2_ver} module list +#export USE_CFP=YES export cyc=%CYC% export cycle=t%CYC%z +#### EMC developer only +COMPONENT=atmos +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gdas/atmos/post/.gitignore b/ecf/scripts/gdas/atmos/post/.gitignore index 851760300f..4dc1865dd3 100644 --- a/ecf/scripts/gdas/atmos/post/.gitignore +++ b/ecf/scripts/gdas/atmos/post/.gitignore @@ -1,3 +1,2 @@ # Ignore these -jgdas_atmos_post_anl.ecf jgdas_atmos_post_f*.ecf diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf index ff4910a277..7ccb629e57 100755 --- a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf +++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf @@ -38,6 +38,11 @@ export cyc=%CYC% export cycle=t%CYC%z export VERBOSE=YES +#### EMC developer only +export OZN_TANKDIR=${OZN_TANKDIR:-$(compath.py -o ${envir}/${NET}/${gfs_ver})} +export COM_IN=${COM_IN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf index 28fdd7f266..2a0b54d84e 100755 --- a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf +++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf @@ -38,6 +38,11 @@ export cyc=%CYC% export cycle=t%CYC%z 
export VERBOSE=YES +#### EMC developer only +export TANKverf=${TANKverf:-$(compath.py -o ${envir}/${NET}/${gfs_ver})} +export COM_IN=${COM_IN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf index f4a1a748f2..33f5574b01 100755 --- a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf +++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf @@ -36,6 +36,10 @@ export cyc=%CYC% export cycle=t%CYC%z export VERBOSE=YES +#### EMC developer only +export COM_IN=${COM_IN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gdas/jgdas_forecast.ecf b/ecf/scripts/gdas/jgdas_forecast.ecf index 2484d38d08..374c7e85aa 100755 --- a/ecf/scripts/gdas/jgdas_forecast.ecf +++ b/ecf/scripts/gdas/jgdas_forecast.ecf @@ -36,6 +36,7 @@ module list ############################################################# export cyc=%CYC% export cycle=t%CYC%z +#export USE_CFP=YES ############################################################ # CALL executable job script here diff --git a/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf b/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf index 1f981ceda5..5ccacc4958 100755 --- a/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf +++ b/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf @@ -31,6 +31,7 @@ module load cdo/${cdo_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} module load udunits/${udunits_ver} +module load gsl/${gsl_ver} module load nco/${nco_ver} module load wgrib2/${wgrib2_ver} diff --git a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf 
b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf index 1fe1b33005..1aa4c1759c 100755 --- a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf +++ b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf @@ -3,7 +3,7 @@ #PBS -j oe #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% -#PBS -l walltime=00:10:00 +#PBS -l walltime=00:20:00 #PBS -l select=1:mpiprocs=128:ompthreads=1:ncpus=128 #PBS -l place=vscatter:excl #PBS -l debug=true diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf index 2a384546d7..c60a8f34d3 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf @@ -38,6 +38,12 @@ module list export cyc=%CYC% export cycle=t%CYC%z +#### EMC developer only +COMPONENT=atmos +export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf index 81f8e14864..37dd42c720 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf @@ -38,6 +38,13 @@ export cyc=%CYC% export cycle=t%CYC%z export USE_CFP=YES +#### EMC developer only +COMPONENT=atmos +export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak} +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak/meta} +export COMINgempak=${COMINgempak:-$(compath.py -o ${envir}/${NET}/${gfs_ver})} +#### + ############################################################ # CALL executable 
job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf index e7cbbab8cc..0262af958f 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf @@ -35,6 +35,13 @@ module list export cyc=%CYC% export cycle=t%CYC%z +#### EMC developer only +COMPONENT=atmos +export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak} +export COMINgfs=${COMINgfs:-$(compath.py -o ${envir}/${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/$COMPONENT} +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf index 6f1d6b3ba5..683eb9de1c 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf @@ -34,6 +34,12 @@ module list export cyc=%CYC% export cycle=t%CYC%z +#### EMC developer only +COMPONENT=atmos +export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf index 3eb0596993..2d89f54bd0 100755 --- 
a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf @@ -36,6 +36,12 @@ module list export cyc=%CYC% export cycle=t%CYC%z +#### EMC developer only +COMPONENT=atmos +export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf b/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf index f0a1a3346f..3f9d42c8ba 100755 --- a/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf +++ b/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf @@ -33,6 +33,11 @@ module list export cyc=%CYC% export cycle=t%CYC%z +#### EMC developer only +COMPONENT=atmos +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/post/.gitignore b/ecf/scripts/gfs/atmos/post/.gitignore index 01d641c46d..f80e805181 100644 --- a/ecf/scripts/gfs/atmos/post/.gitignore +++ b/ecf/scripts/gfs/atmos/post/.gitignore @@ -1,3 +1,2 @@ # Ignore these -jgfs_atmos_post_anl.ecf jgfs_atmos_post_f*.ecf diff --git a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf index 12f13ea6f3..89691034b0 100755 --- a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf +++ b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf @@ -4,7 +4,7 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:20:00 -#PBS -l 
select=1:mpiprocs=112:ompthreads=1:ncpus=112 +#PBS -l select=1:mpiprocs=126:ompthreads=1:ncpus=126 #PBS -l place=vscatter:excl #PBS -l debug=true diff --git a/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf b/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf index 41bcf316ce..cc66b9d6ee 100755 --- a/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf @@ -38,6 +38,12 @@ export cycle=t%CYC%z export FHRGRP=%FHRGRP% FHRLST=%FHRLST% FCSTHR=%FCSTHR% TRDRUN=%TRDRUN% fcsthrs=%FCSTHR% export job=jgfs_awips_f%FCSTHR%_%CYC% +#### EMC developer only +COMPONENT=atmos +export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf b/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf index aca8e529e8..0670644427 100755 --- a/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf @@ -42,6 +42,12 @@ export cycle=t%CYC%z trdrun=%TRDRUN% export job="jgfs_awips_f${fcsthrs}_${cyc}" +#### EMC developer only +COMPONENT=atmos +export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git 
a/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf b/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf index 7c246cb192..c18f4001d3 100755 --- a/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf @@ -36,6 +36,12 @@ module list export cyc=%CYC% export cycle=t%CYC%z +#### EMC developer only +COMPONENT=atmos +export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending.ecf b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending.ecf index 72e69281b1..fb746ac627 100755 --- a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending.ecf @@ -36,6 +36,12 @@ module list export cyc=%CYC% export cycle=t%CYC%z +#### EMC developer only +COMPONENT=atmos +export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending_0p25.ecf b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending_0p25.ecf index 99173bb28b..da315640eb 100755 --- a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending_0p25.ecf +++ 
b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending_0p25.ecf @@ -36,6 +36,12 @@ module list export cyc=%CYC% export cycle=t%CYC%z +#### EMC developer only +COMPONENT=atmos +export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2.ecf b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2.ecf index 4f3a624aab..5c92b128e6 100755 --- a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2.ecf @@ -37,6 +37,12 @@ module list export cyc=%CYC% export cycle=t%CYC%z +#### EMC developer only +COMPONENT=atmos +export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2_0p25.ecf b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2_0p25.ecf index e611aa5499..bbd5634086 100755 --- a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2_0p25.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2_0p25.ecf @@ -37,6 +37,12 @@ module list export cyc=%CYC% export cycle=t%CYC%z +#### EMC developer only +COMPONENT=atmos +export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} 
+export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/post_processing/grib_wafs/jgfs_atmos_wafs_master.ecf b/ecf/scripts/gfs/atmos/post_processing/grib_wafs/jgfs_atmos_wafs_master.ecf index 7e56ea1b9e..14d8ab6313 100755 --- a/ecf/scripts/gfs/atmos/post_processing/grib_wafs/jgfs_atmos_wafs_master.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/grib_wafs/jgfs_atmos_wafs_master.ecf @@ -38,6 +38,12 @@ module list export cyc=%CYC% export cycle=t%CYC%z +#### EMC developer only +COMPONENT=atmos +export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/post_processing/jgfs_atmos_wafs_gcip.ecf b/ecf/scripts/gfs/atmos/post_processing/jgfs_atmos_wafs_gcip.ecf index 31e98e13a9..a051e21c25 100755 --- a/ecf/scripts/gfs/atmos/post_processing/jgfs_atmos_wafs_gcip.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/jgfs_atmos_wafs_gcip.ecf @@ -41,6 +41,12 @@ export cyc=%CYC% export cycle=t%CYC%z export USE_CFP=YES +#### EMC developer only +COMPONENT=atmos +export COMINgfs=${COMINgfs:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf 
b/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf index 4eb9d4e585..8d56da862e 100755 --- a/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf +++ b/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf @@ -36,6 +36,10 @@ export cyc=%CYC% export cycle=t%CYC%z export VERBOSE=YES +#### EMC developer only +export COM_IN=${COM_IN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf b/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf index 199f68adeb..a8ee959189 100755 --- a/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf +++ b/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf @@ -34,6 +34,12 @@ module list export cyc=%CYC% export cycle=t%CYC%z +#### EMC developer only +COMPONENT=wave +export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf index 9f30289093..3818124625 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf @@ -34,6 +34,12 @@ module list export cyc=%CYC% export cycle=t%CYC%z +#### EMC developer only +COMPONENT=wave +export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +#### + ############################################################ # CALL executable job script here 
############################################################ diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf index 192f8cd98e..9e4851d8d7 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf @@ -36,6 +36,12 @@ module list export cyc=%CYC% export cycle=t%CYC%z +#### EMC developer only +COMPONENT=wave +export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} +#### + ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf b/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf index aa65854899..363c090ce0 100755 --- a/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf +++ b/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf @@ -31,6 +31,7 @@ module load cdo/${cdo_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} module load udunits/${udunits_ver} +module load gsl/${gsl_ver} module load nco/${nco_ver} module load wgrib2/${wgrib2_ver} From ce4a516df58808f924445e15342893b83073d514 Mon Sep 17 00:00:00 2001 From: "Lin.Gan" Date: Fri, 8 Apr 2022 17:56:05 +0000 Subject: [PATCH 06/33] This is the second part of ecflow PR. 
The developer enabled ecflow files - (modify the current dev_v16 with developer testing COM assignment) --- .../enkfgdas/analysis/create/jenkfgdas_diag.ecf | 3 ++- .../enkfgdas/analysis/create/jenkfgdas_select_obs.ecf | 5 ++--- .../enkfgdas/analysis/create/jenkfgdas_update.ecf | 3 ++- .../enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf | 4 ++-- .../enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf | 5 ++--- ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf | 10 +++------- ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf | 4 ++-- .../gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf | 2 +- .../gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf | 1 - .../atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf | 1 - ecf/scripts/gdas/atmos/post/.gitignore | 1 + ecf/scripts/gdas/jgdas_forecast.ecf | 1 - ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf | 1 - .../gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf | 2 +- ecf/scripts/gfs/atmos/post/.gitignore | 1 + ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf | 2 +- ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf | 1 - 17 files changed, 20 insertions(+), 27 deletions(-) diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf index ba73860cdf..e50e886381 100755 --- a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf +++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf @@ -4,7 +4,8 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:06:00 -#PBS -l place=vscatter,select=1:mpiprocs=48:ompthreads=1:ncpus=48:mem=24GB +#PBS -l select=1:mpiprocs=48:ompthreads=1:ncpus=48:mem=24GB +#PBS -l place=vscatter #PBS -l debug=true model=gfs diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf index c0836fd996..0f14ee74ab 100755 --- a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf +++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf 
@@ -4,7 +4,8 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:10:00 -#PBS -l place=vscatter:excl,select=12:mpiprocs=40:ompthreads=3:ncpus=120 +#PBS -l select=12:mpiprocs=40:ompthreads=3:ncpus=120 +#PBS -l place=vscatter:excl #PBS -l debug=true model=gfs @@ -25,7 +26,6 @@ module load craype/${craype_ver} module load intel/${intel_ver} module load cray-mpich/${cray_mpich_ver} module load cray-pals/${cray_pals_ver} -#module load cfp/${cfp_ver} module load python/${python_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} @@ -38,7 +38,6 @@ module list ############################################################# export cyc=%CYC% export cycle=t%CYC%z -#export USE_CFP=YES ############################################################ # CALL executable job script here diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf index 625c31789d..a15e18126f 100755 --- a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf +++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf @@ -4,7 +4,8 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:30:00 -#PBS -l place=vscatter:excl,select=35:mpiprocs=9:ompthreads=14:ncpus=126 +#PBS -l select=35:mpiprocs=9:ompthreads=14:ncpus=126 +#PBS -l place=vscatter:excl #PBS -l debug=true model=gfs diff --git a/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf b/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf index 1640290d4d..2a72897cfa 100755 --- a/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf +++ b/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf @@ -4,7 +4,8 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:10:00 -#PBS -l place=vscatter:excl,select=3:mpiprocs=32:ompthreads=4:ncpus=128 +#PBS -l select=3:mpiprocs=32:ompthreads=4:ncpus=128 +#PBS -l place=vscatter:excl #PBS -l debug=true model=gfs @@ -37,7 +38,6 @@ module list export 
FHRGRP=%FHRGRP% export cyc=%CYC% export cycle=t%CYC%z -#export USE_CFP=YES export FHMIN_ECEN=$FHRGRP export FHMAX_ECEN=$FHRGRP export FHOUT_ECEN=$FHRGRP diff --git a/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf b/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf index c646bd9a37..de0f437810 100755 --- a/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf +++ b/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf @@ -4,7 +4,8 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:06:00 -#PBS -l place=vscatter,select=1:mpiprocs=80:ompthreads=1:ncpus=80:mem=40GB +#PBS -l select=1:mpiprocs=80:ompthreads=1:ncpus=80:mem=40GB +#PBS -l place=vscatter #PBS -l debug=true model=gfs @@ -30,7 +31,6 @@ module load libjpeg/${libjpeg_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} module load grib_util/${grib_util_ver} -#module load wgrib2/${wgrib2_ver} module list @@ -39,7 +39,6 @@ module list ############################################################# export cyc=%CYC% export cycle=t%CYC%z -#export USE_CFP=YES ############################################################ # CALL executable job script here diff --git a/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf b/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf index b5bcbc4b17..2584b572aa 100755 --- a/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf +++ b/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf @@ -3,8 +3,9 @@ #PBS -j oe #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% -#PBS -l walltime=01:20:00 -#PBS -l place=vscatter:excl,select=4:mpiprocs=128:ompthreads=1:ncpus=128 +#PBS -l walltime=00:40:00 +#PBS -l select=4:mpiprocs=128:ompthreads=1:ncpus=128 +#PBS -l place=vscatter:excl #PBS -l debug=true model=gfs @@ -25,12 +26,8 @@ module load craype/${craype_ver} module load intel/${intel_ver} module load cray-mpich/${cray_mpich_ver} module load cray-pals/${cray_pals_ver} -#module load esmf/${esmf_ver} -#module load cfp/${cfp_ver} -#module load libjpeg/${libjpeg_ver} 
module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} -#module load grib_util/${grib_util_ver} module list @@ -40,7 +37,6 @@ module list export ENSGRP=%ENSGRP% export cyc=%CYC% export cycle=t%CYC%z -#export USE_CFP=YES ############################################################ # CALL executable job script here diff --git a/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf b/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf index 00b901f1ea..744cad198b 100755 --- a/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf +++ b/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf @@ -4,7 +4,8 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:15:00 -#PBS -l place=vscatter:excl,select=3:mpiprocs=32:ompthreads=4:ncpus=128 +#PBS -l select=3:mpiprocs=32:ompthreads=4:ncpus=128 +#PBS -l place=vscatter:excl #PBS -l debug=true model=gfs @@ -38,7 +39,6 @@ export FHMAX_EPOS=%FHOUT_EPOS% export FHOUT_EPOS=%FHOUT_EPOS% export cyc=%CYC% export cycle=t%CYC%z -#export USE_CFP=YES ############################################################ # CALL executable job script here diff --git a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf index 65ea403cf3..1fe1b33005 100755 --- a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf +++ b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf @@ -3,7 +3,7 @@ #PBS -j oe #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% -#PBS -l walltime=01:20:00 +#PBS -l walltime=00:10:00 #PBS -l select=1:mpiprocs=128:ompthreads=1:ncpus=128 #PBS -l place=vscatter:excl #PBS -l debug=true diff --git a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf index 340cac8fc7..52c2c21db3 100755 --- a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf +++ b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf @@ -31,7 +31,6 @@ module list export cyc=%CYC% export 
cycle=t%CYC%z -#export USE_CFP=YES #### EMC developer only COMPONENT=atmos diff --git a/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf b/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf index 9357af68b1..0411cb1036 100755 --- a/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf +++ b/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf @@ -30,7 +30,6 @@ module load wgrib2/${wgrib2_ver} module list -#export USE_CFP=YES export cyc=%CYC% export cycle=t%CYC%z diff --git a/ecf/scripts/gdas/atmos/post/.gitignore b/ecf/scripts/gdas/atmos/post/.gitignore index 4dc1865dd3..851760300f 100644 --- a/ecf/scripts/gdas/atmos/post/.gitignore +++ b/ecf/scripts/gdas/atmos/post/.gitignore @@ -1,2 +1,3 @@ # Ignore these +jgdas_atmos_post_anl.ecf jgdas_atmos_post_f*.ecf diff --git a/ecf/scripts/gdas/jgdas_forecast.ecf b/ecf/scripts/gdas/jgdas_forecast.ecf index 374c7e85aa..2484d38d08 100755 --- a/ecf/scripts/gdas/jgdas_forecast.ecf +++ b/ecf/scripts/gdas/jgdas_forecast.ecf @@ -36,7 +36,6 @@ module list ############################################################# export cyc=%CYC% export cycle=t%CYC%z -#export USE_CFP=YES ############################################################ # CALL executable job script here diff --git a/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf b/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf index 5ccacc4958..1f981ceda5 100755 --- a/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf +++ b/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf @@ -31,7 +31,6 @@ module load cdo/${cdo_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} module load udunits/${udunits_ver} -module load gsl/${gsl_ver} module load nco/${nco_ver} module load wgrib2/${wgrib2_ver} diff --git a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf index 1aa4c1759c..1fe1b33005 100755 --- 
a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf +++ b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf @@ -3,7 +3,7 @@ #PBS -j oe #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% -#PBS -l walltime=00:20:00 +#PBS -l walltime=00:10:00 #PBS -l select=1:mpiprocs=128:ompthreads=1:ncpus=128 #PBS -l place=vscatter:excl #PBS -l debug=true diff --git a/ecf/scripts/gfs/atmos/post/.gitignore b/ecf/scripts/gfs/atmos/post/.gitignore index f80e805181..01d641c46d 100644 --- a/ecf/scripts/gfs/atmos/post/.gitignore +++ b/ecf/scripts/gfs/atmos/post/.gitignore @@ -1,2 +1,3 @@ # Ignore these +jgfs_atmos_post_anl.ecf jgfs_atmos_post_f*.ecf diff --git a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf index 89691034b0..12f13ea6f3 100755 --- a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf +++ b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf @@ -4,7 +4,7 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:20:00 -#PBS -l select=1:mpiprocs=126:ompthreads=1:ncpus=126 +#PBS -l select=1:mpiprocs=112:ompthreads=1:ncpus=112 #PBS -l place=vscatter:excl #PBS -l debug=true diff --git a/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf b/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf index 363c090ce0..aa65854899 100755 --- a/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf +++ b/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf @@ -31,7 +31,6 @@ module load cdo/${cdo_ver} module load hdf5/${hdf5_ver} module load netcdf/${netcdf_ver} module load udunits/${udunits_ver} -module load gsl/${gsl_ver} module load nco/${nco_ver} module load wgrib2/${wgrib2_ver} From 8a997e497a43f02f0d2e772c9d525505efec7f2a Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Mon, 11 Apr 2022 09:27:54 -0500 Subject: [PATCH 07/33] Set USE_CFP=NO for GLDAS on Orion - update gldas block in ORION.env to set USE_CFP=NO Refs: #665 --- env/ORION.env | 2 ++ 1 file changed, 2 insertions(+) diff --git a/env/ORION.env b/env/ORION.env index 
de989331ff..3aadd8c36f 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -81,6 +81,8 @@ elif [ $step = "anal" ]; then elif [ $step = "gldas" ]; then + export USE_CFP="NO" + nth_max=$(($npe_node_max / $npe_node_gldas)) export NTHREADS_GLDAS=${nth_gldas:-$nth_max} From e54b631c1c543f16084e641c778c1671041b1d8b Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Mon, 11 Apr 2022 09:29:44 -0500 Subject: [PATCH 08/33] Update config.fv3.emc.dyn to support R&D and write group diffs - update C384 WRITE_GROUP_GFS to be 2 to match develop and use more nodes - update WRTTASK_PER_GROUP and WRTTASK_PER_GROUP_GFS to set WCOSS2 values but then if values are greater than npe_node_max change variables to be npe_node_max (retains what works on Hera/Orion with 40 pes per node) Refs: #665 --- parm/config/config.fv3.emc.dyn | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/parm/config/config.fv3.emc.dyn b/parm/config/config.fv3.emc.dyn index b3e30503f7..ea9fca9ab7 100755 --- a/parm/config/config.fv3.emc.dyn +++ b/parm/config/config.fv3.emc.dyn @@ -45,9 +45,11 @@ case $case_in in export nth_fv3_gfs=1 export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export WRITE_GROUP=1 - export WRTTASK_PER_GROUP=$npe_node_max + export WRTTASK_PER_GROUP=64 + if [[ "$WRTTASK_PER_GROUP" -gt "$npe_node_max" ]]; then export WRTTASK_PER_GROUP=$npe_node_max ; fi export WRITE_GROUP_GFS=1 - export WRTTASK_PER_GROUP_GFS=$npe_node_max + export WRTTASK_PER_GROUP_GFS=64 + if [[ "$WRTTASK_PER_GROUP_GFS" -gt "$npe_node_max" ]]; then export WRTTASK_PER_GROUP_GFS=$npe_node_max ; fi export WRTIOBUF="1M" ;; "C96") @@ -62,9 +64,11 @@ case $case_in in export nth_fv3_gfs=1 export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export WRITE_GROUP=1 - export WRTTASK_PER_GROUP=$npe_node_max + export WRTTASK_PER_GROUP=64 + if [[ "$WRTTASK_PER_GROUP" -gt "$npe_node_max" ]]; then export WRTTASK_PER_GROUP=$npe_node_max ; fi 
export WRITE_GROUP_GFS=1 - export WRTTASK_PER_GROUP_GFS=$npe_node_max + export WRTTASK_PER_GROUP_GFS=64 + if [[ "$WRTTASK_PER_GROUP_GFS" -gt "$npe_node_max" ]]; then export WRTTASK_PER_GROUP_GFS=$npe_node_max ; fi export WRTIOBUF="4M" export n_split=6 ;; @@ -80,9 +84,11 @@ case $case_in in export nth_fv3_gfs=2 export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export WRITE_GROUP=1 - export WRTTASK_PER_GROUP=$npe_node_max + export WRTTASK_PER_GROUP=64 + if [[ "$WRTTASK_PER_GROUP" -gt "$npe_node_max" ]]; then export WRTTASK_PER_GROUP=$npe_node_max ; fi export WRITE_GROUP_GFS=2 - export WRTTASK_PER_GROUP_GFS=$npe_node_max + export WRTTASK_PER_GROUP_GFS=64 + if [[ "$WRTTASK_PER_GROUP_GFS" -gt "$npe_node_max" ]]; then export WRTTASK_PER_GROUP_GFS=$npe_node_max ; fi export WRTIOBUF="8M" ;; "C384") @@ -94,12 +100,14 @@ case $case_in in export npe_wav=35 export npe_wav_gfs=35 export nth_fv3=1 - export nth_fv3_gfs=1 + export nth_fv3_gfs=2 export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export WRITE_GROUP=1 export WRTTASK_PER_GROUP=64 - export WRITE_GROUP_GFS=1 + if [[ "$WRTTASK_PER_GROUP" -gt "$npe_node_max" ]]; then export WRTTASK_PER_GROUP=$npe_node_max ; fi + export WRITE_GROUP_GFS=2 export WRTTASK_PER_GROUP_GFS=64 + if [[ "$WRTTASK_PER_GROUP_GFS" -gt "$npe_node_max" ]]; then export WRTTASK_PER_GROUP_GFS=$npe_node_max ; fi export WRTIOBUF="16M" ;; "C768") @@ -115,8 +123,10 @@ case $case_in in export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export WRITE_GROUP=2 export WRTTASK_PER_GROUP=64 + if [[ "$WRTTASK_PER_GROUP" -gt "$npe_node_max" ]]; then export WRTTASK_PER_GROUP=$npe_node_max ; fi export WRITE_GROUP_GFS=8 export WRTTASK_PER_GROUP_GFS=48 + if [[ "$WRTTASK_PER_GROUP_GFS" -gt "$npe_node_max" ]]; then export WRTTASK_PER_GROUP_GFS=$npe_node_max ; fi export WRTIOBUF="32M" ;; "C1152") From 8d6e0f69982f3d3cf27e262d2780e5e2fa389506 Mon Sep 17 00:00:00 2001 From: 
"kate.friedman" Date: Mon, 11 Apr 2022 09:33:35 -0500 Subject: [PATCH 09/33] Update config.resources.emc.dyn based on Orion testing - add memory setting for prep job (40GB) - add missing _gfs variables for anal job - add npe_node_$step greater than npe_node_max checks for the analdiag, gldas, post, and esfc jobs (set npe_node_$step to npe_node_max if so) - increase wtime_fcst_gfs to 6hrs - update echgres resources to be prior values and wall off WCOSS2 values - update eobs resources to be consistent with appropriate values from develop - wall off C768 WCOSS2 resources for eobs - update eupd resources to be consistent with develop values Updated resources performed well in two cycled tests on Orion: 1) C192C96L127 2) C384C192L127 Need to test these values on WCOSS2 and Hera for further checks. Refs: #665 --- parm/config/config.resources.emc.dyn | 61 ++++++++++++++++++++-------- 1 file changed, 44 insertions(+), 17 deletions(-) diff --git a/parm/config/config.resources.emc.dyn b/parm/config/config.resources.emc.dyn index 12d0094e5c..7a36b7e2e1 100755 --- a/parm/config/config.resources.emc.dyn +++ b/parm/config/config.resources.emc.dyn @@ -38,6 +38,7 @@ if [ $step = "prep" -o $step = "prepbufr" ]; then eval "export npe_$step=4" eval "export npe_node_$step=2" eval "export nth_$step=1" + eval "export memory_$step=40G" elif [ $step = "waveinit" ]; then @@ -130,9 +131,14 @@ elif [ $step = "anal" ]; then export nth_anal_gfs=8 if [ $CASE = "C384" ]; then export npe_anal=160 + export npe_anal_gfs=160 export nth_anal=10 + export nth_anal_gfs=10 + fi + if [ $CASE = "C192" -o $CASE = "C96" -o $CASE = "C48" ]; then + export npe_anal=84 + export npe_anal_gfs=84 fi - if [ $CASE = "C192" -o $CASE = "C96" -o $CASE = "C48" ]; then export npe_anal=84; fi export npe_node_anal=$(echo "$npe_node_max / $nth_anal" | bc) if [[ "$machine" = "WCOSS2" ]]; then export npe_node_anal=15; fi export nth_cycle=$npe_node_max @@ -152,6 +158,7 @@ elif [ $step = "analdiag" ]; then export npe_analdiag=96 
# Should be at least twice npe_ediag export nth_analdiag=1 export npe_node_analdiag=$npe_analdiag + if [[ "$npe_node_analdiag" -gt "$npe_node_max" ]]; then export npe_node_analdiag=$npe_node_max ; fi export memory_analdiag="48GB" elif [ $step = "gldas" ]; then @@ -160,6 +167,7 @@ elif [ $step = "gldas" ]; then export npe_gldas=112 export nth_gldas=1 export npe_node_gldas=$npe_gldas + if [[ "$npe_node_gldas" -gt "$npe_node_max" ]]; then export npe_node_gldas=$npe_node_max ; fi export npe_gaussian=96 export nth_gaussian=1 export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) @@ -167,7 +175,7 @@ elif [ $step = "gldas" ]; then elif [ $step = "fcst" ]; then export wtime_fcst="01:30:00" - export wtime_fcst_gfs="02:30:00" + export wtime_fcst_gfs="06:00:00" export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) export nth_fcst=${nth_fv3:-2} @@ -188,6 +196,8 @@ elif [ $step = "post" ]; then export npe_node_post=$npe_post export npe_node_post_gfs=$npe_post export npe_node_dwn=$npe_node_max + if [[ "$npe_node_post" -gt "$npe_node_max" ]]; then export npe_node_post=$npe_node_max ; fi + if [[ "$npe_node_post_gfs" -gt "$npe_node_max" ]]; then export npe_node_post_gfs=$npe_node_max ; fi elif [ $step = "wafs" ]; then @@ -264,9 +274,13 @@ elif [ $step = "echgres" ]; then export wtime_echgres="00:10:00" export npe_echgres=3 - export nth_echgres=1 - export npe_node_echgres=3 - export memory_echgres="200GB" + export nth_echgres=$npe_node_max + export npe_node_echgres=1 + if [[ "$machine" == "WCOSS2" ]]; then + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + fi elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then @@ -278,21 +292,25 @@ elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then elif [ $step = "eobs" -o $step = "eomg" ]; then - - export wtime_eobs="00:10:00" + export wtime_eobs="00:15:00" export wtime_eomg="01:00:00" if [ $CASE = 
"C768" ]; then - export npe_eobs=480 + export npe_eobs=200 elif [ $CASE = "C384" ]; then - export npe_eobs=42 + export npe_eobs=100 elif [ $CASE = "C192" ]; then - export npe_eobs=28 + export npe_eobs=40 elif [ $CASE = "C96" -o $CASE = "C48" ]; then - export npe_eobs=14 + export npe_eobs=20 fi - export nth_eobs=3 + export nth_eobs=2 export npe_node_eobs=$(echo "$npe_node_max / $nth_eobs" | bc) - if [[ "$machine" = "WCOSS2" ]]; then export npe_node_eobs=40; fi + + if [[ "$machine" = "WCOSS2" && "$CASE" = "C768" ]]; then + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + fi elif [ $step = "ediag" ]; then @@ -306,18 +324,26 @@ elif [ $step = "eupd" ]; then export wtime_eupd="00:30:00" if [ $CASE = "C768" ]; then - export npe_eupd=315 - export nth_eupd=14 + export npe_eupd=480 + export nth_eupd=6 + if [[ "$machine" = "HERA" ]]; then + export npe_eupd=150 + export nth_eupd=40 + fi elif [ $CASE = "C384" ]; then export npe_eupd=270 export nth_eupd=2 if [[ "$machine" = "HERA" ]]; then - export npe_eupd=84 - export nth_eupd=10 + export npe_eupd=100 + export nth_eupd=40 fi elif [ $CASE = "C192" -o $CASE = "C96" -o $CASE = "C48" ]; then export npe_eupd=42 export nth_eupd=2 + if [[ "$machine" = "HERA" ]]; then + export npe_eupd=40 + export nth_eupd=40 + fi fi export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) @@ -336,6 +362,7 @@ elif [ $step = "esfc" ]; then export wtime_esfc="00:06:00" export npe_esfc=80 export npe_node_esfc=$npe_esfc + if [[ "$npe_node_esfc" -gt "$npe_node_max" ]]; then export npe_node_esfc=$npe_node_max ; fi export nth_esfc=1 export nth_cycle=$nth_esfc export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) From 79bc101726328f6c9f0db43c7b9b6f041f7357d6 Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Thu, 28 Apr 2022 12:57:12 +0000 Subject: [PATCH 10/33] Update EMC_verif-global tag to v2.9.0 in Externals.cfg Refs: #665 --- Externals.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Externals.cfg 
b/Externals.cfg index 8d62151ba2..a79f54728c 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -36,7 +36,7 @@ protocol = git required = True [EMC_verif-global] -tag = verif_global_v1.11.0 +tag = verif_global_v2.9.0 local_path = sorc/verif-global.fd repo_url = https://github.com/NOAA-EMC/EMC_verif-global.git protocol = git From 69c17163859da32536dd2ebd5d652ec773504f4b Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Thu, 28 Apr 2022 13:12:40 +0000 Subject: [PATCH 11/33] Remove reference to HOMEobsproc in NCO config.base - The HOMEobsproc setting in config.base.nco.static is not used in operations and thus not needed in this version of config.base. Refs: #399 --- parm/config/config.base.nco.static | 1 - 1 file changed, 1 deletion(-) diff --git a/parm/config/config.base.nco.static b/parm/config/config.base.nco.static index 825b20587a..02da0bca83 100755 --- a/parm/config/config.base.nco.static +++ b/parm/config/config.base.nco.static @@ -64,7 +64,6 @@ export REALTIME="YES" export FIXgsi="$HOMEgfs/fix/fix_gsi" export HOMEfv3gfs="$HOMEgfs/sorc/fv3gfs.fd" export HOMEpost="$HOMEgfs" -export HOMEobsproc="/lfs/h1/ops/prod/packages/obsproc.v1.0.0" export BASE_VERIF="$BASE_GIT/verif/global/tags/vsdb" # CONVENIENT utility scripts and other environment parameters From bea4f7c6de4a3c6bdb643b0634603968b46ef6a9 Mon Sep 17 00:00:00 2001 From: "Lin.Gan" Date: Thu, 28 Apr 2022 18:55:45 +0000 Subject: [PATCH 12/33] Remove Developer path assignment from ecflow script level and move it into the envir-p1.h --- ecf/include/envir-p1.h | 14 ++++++++++++++ .../gdas/atmos/gempak/jgdas_atmos_gempak.ecf | 6 ------ .../atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf | 8 -------- .../obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf | 5 ----- .../gdas/atmos/verf/jgdas_atmos_verfozn.ecf | 5 ----- .../gdas/atmos/verf/jgdas_atmos_verfrad.ecf | 5 ----- .../gdas/atmos/verf/jgdas_atmos_vminmon.ecf | 4 ---- ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf | 6 ------ 
.../gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf | 7 ------- .../gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf | 7 ------- .../gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf | 6 ------ .../atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf | 6 ------ .../obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf | 5 ----- .../awips_20km_1p0/jgfs_atmos_awips_master.ecf | 6 ------ .../awips_g2/jgfs_atmos_awips_g2_master.ecf | 6 ------ .../bulletins/jgfs_atmos_fbwind.ecf | 6 ------ .../grib2_wafs/jgfs_atmos_wafs_blending.ecf | 6 ------ .../grib2_wafs/jgfs_atmos_wafs_blending_0p25.ecf | 6 ------ .../grib2_wafs/jgfs_atmos_wafs_grib2.ecf | 6 ------ .../grib2_wafs/jgfs_atmos_wafs_grib2_0p25.ecf | 6 ------ .../grib_wafs/jgfs_atmos_wafs_master.ecf | 6 ------ .../atmos/post_processing/jgfs_atmos_wafs_gcip.ecf | 6 ------ ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf | 4 ---- ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf | 6 ------ .../gfs/wave/post/jgfs_wave_prdgen_bulls.ecf | 6 ------ .../gfs/wave/post/jgfs_wave_prdgen_gridded.ecf | 6 ------ 26 files changed, 14 insertions(+), 146 deletions(-) diff --git a/ecf/include/envir-p1.h b/ecf/include/envir-p1.h index c6c5112cf9..7d2998be59 100644 --- a/ecf/include/envir-p1.h +++ b/ecf/include/envir-p1.h @@ -15,3 +15,17 @@ fi export DBNROOT=$SIPHONROOT if [[ ! 
" prod para test " =~ " ${envir} " && " ops.prod ops.para " =~ " $(whoami) " ]]; then err_exit "ENVIR must be prod, para, or test [envir-p1.h]"; fi + +#### Developer Overwrite +#### For script level COM path assignment - compath.py gfs/v16.2 +#### Result in path: ${PTMP}/${USER}/${PSLOT}/para/com/gfs/v16.2 +PTMP=/lfs/h2/emc/ptmp +PSLOT=ecfops +export COMROOT=${PTMP}/${USER}/${PSLOT}/para/com +export COMPATH=${PTMP}/${USER}/${PSLOT}/para/com/gfs +export ROTDIR="$(compath.py gfs/${gfs_ver})" + +if [ -n "%PDY:%" ]; then + export PDY=${PDY:-%PDY:%} + export CDATE=${PDY}%CYC:% +fi diff --git a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf index d075bfac44..b6c9454318 100755 --- a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf +++ b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf @@ -35,12 +35,6 @@ export cyc=%CYC% export cycle=t%CYC%z export USE_CFP=YES -#### EMC developer only -COMPONENT=atmos -export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf index 52c2c21db3..61f7f0a17f 100755 --- a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf +++ b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf @@ -32,14 +32,6 @@ module list export cyc=%CYC% export cycle=t%CYC%z -#### EMC developer only -COMPONENT=atmos -export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak} -export COMINgdas=${COMINgdas:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}} -export 
COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak/meta} -export COMOUTncdc=${COMOUTncdc:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf b/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf index 0411cb1036..e0ab513b33 100755 --- a/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf +++ b/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf @@ -33,11 +33,6 @@ module list export cyc=%CYC% export cycle=t%CYC%z -#### EMC developer only -COMPONENT=atmos -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf index 7ccb629e57..ff4910a277 100755 --- a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf +++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf @@ -38,11 +38,6 @@ export cyc=%CYC% export cycle=t%CYC%z export VERBOSE=YES -#### EMC developer only -export OZN_TANKDIR=${OZN_TANKDIR:-$(compath.py -o ${envir}/${NET}/${gfs_ver})} -export COM_IN=${COM_IN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf index 2a0b54d84e..28fdd7f266 100755 --- a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf +++ 
b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf @@ -38,11 +38,6 @@ export cyc=%CYC% export cycle=t%CYC%z export VERBOSE=YES -#### EMC developer only -export TANKverf=${TANKverf:-$(compath.py -o ${envir}/${NET}/${gfs_ver})} -export COM_IN=${COM_IN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf index 33f5574b01..f4a1a748f2 100755 --- a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf +++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf @@ -36,10 +36,6 @@ export cyc=%CYC% export cycle=t%CYC%z export VERBOSE=YES -#### EMC developer only -export COM_IN=${COM_IN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf index c60a8f34d3..2a384546d7 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf @@ -38,12 +38,6 @@ module list export cyc=%CYC% export cycle=t%CYC%z -#### EMC developer only -COMPONENT=atmos -export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf index 37dd42c720..81f8e14864 100755 --- 
a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf @@ -38,13 +38,6 @@ export cyc=%CYC% export cycle=t%CYC%z export USE_CFP=YES -#### EMC developer only -COMPONENT=atmos -export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak} -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak/meta} -export COMINgempak=${COMINgempak:-$(compath.py -o ${envir}/${NET}/${gfs_ver})} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf index 0262af958f..e7cbbab8cc 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf @@ -35,13 +35,6 @@ module list export cyc=%CYC% export cycle=t%CYC%z -#### EMC developer only -COMPONENT=atmos -export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak} -export COMINgfs=${COMINgfs:-$(compath.py -o ${envir}/${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf index 683eb9de1c..6f1d6b3ba5 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf @@ -34,12 +34,6 @@ module list export cyc=%CYC% export cycle=t%CYC%z 
-#### EMC developer only -COMPONENT=atmos -export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf index 2d89f54bd0..3eb0596993 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf @@ -36,12 +36,6 @@ module list export cyc=%CYC% export cycle=t%CYC%z -#### EMC developer only -COMPONENT=atmos -export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf b/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf index 3f9d42c8ba..f0a1a3346f 100755 --- a/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf +++ b/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf @@ -33,11 +33,6 @@ module list export cyc=%CYC% export cycle=t%CYC%z -#### EMC developer only -COMPONENT=atmos -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git 
a/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf b/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf index cc66b9d6ee..41bcf316ce 100755 --- a/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf @@ -38,12 +38,6 @@ export cycle=t%CYC%z export FHRGRP=%FHRGRP% FHRLST=%FHRLST% FCSTHR=%FCSTHR% TRDRUN=%TRDRUN% fcsthrs=%FCSTHR% export job=jgfs_awips_f%FCSTHR%_%CYC% -#### EMC developer only -COMPONENT=atmos -export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf b/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf index 0670644427..aca8e529e8 100755 --- a/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf @@ -42,12 +42,6 @@ export cycle=t%CYC%z trdrun=%TRDRUN% export job="jgfs_awips_f${fcsthrs}_${cyc}" -#### EMC developer only -COMPONENT=atmos -export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf b/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf index 
c18f4001d3..7c246cb192 100755 --- a/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf @@ -36,12 +36,6 @@ module list export cyc=%CYC% export cycle=t%CYC%z -#### EMC developer only -COMPONENT=atmos -export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending.ecf b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending.ecf index fb746ac627..72e69281b1 100755 --- a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending.ecf @@ -36,12 +36,6 @@ module list export cyc=%CYC% export cycle=t%CYC%z -#### EMC developer only -COMPONENT=atmos -export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending_0p25.ecf b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending_0p25.ecf index da315640eb..99173bb28b 100755 --- a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending_0p25.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending_0p25.ecf @@ -36,12 +36,6 @@ module list export cyc=%CYC% export cycle=t%CYC%z -#### EMC developer only 
-COMPONENT=atmos -export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2.ecf b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2.ecf index 5c92b128e6..4f3a624aab 100755 --- a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2.ecf @@ -37,12 +37,6 @@ module list export cyc=%CYC% export cycle=t%CYC%z -#### EMC developer only -COMPONENT=atmos -export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2_0p25.ecf b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2_0p25.ecf index bbd5634086..e611aa5499 100755 --- a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2_0p25.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2_0p25.ecf @@ -37,12 +37,6 @@ module list export cyc=%CYC% export cycle=t%CYC%z -#### EMC developer only -COMPONENT=atmos -export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -#### - ############################################################ # CALL 
executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/post_processing/grib_wafs/jgfs_atmos_wafs_master.ecf b/ecf/scripts/gfs/atmos/post_processing/grib_wafs/jgfs_atmos_wafs_master.ecf index 14d8ab6313..7e56ea1b9e 100755 --- a/ecf/scripts/gfs/atmos/post_processing/grib_wafs/jgfs_atmos_wafs_master.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/grib_wafs/jgfs_atmos_wafs_master.ecf @@ -38,12 +38,6 @@ module list export cyc=%CYC% export cycle=t%CYC%z -#### EMC developer only -COMPONENT=atmos -export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/post_processing/jgfs_atmos_wafs_gcip.ecf b/ecf/scripts/gfs/atmos/post_processing/jgfs_atmos_wafs_gcip.ecf index a051e21c25..31e98e13a9 100755 --- a/ecf/scripts/gfs/atmos/post_processing/jgfs_atmos_wafs_gcip.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/jgfs_atmos_wafs_gcip.ecf @@ -41,12 +41,6 @@ export cyc=%CYC% export cycle=t%CYC%z export USE_CFP=YES -#### EMC developer only -COMPONENT=atmos -export COMINgfs=${COMINgfs:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf b/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf index 8d56da862e..4eb9d4e585 100755 --- a/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf +++ b/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf @@ 
-36,10 +36,6 @@ export cyc=%CYC% export cycle=t%CYC%z export VERBOSE=YES -#### EMC developer only -export COM_IN=${COM_IN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf b/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf index a8ee959189..199f68adeb 100755 --- a/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf +++ b/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf @@ -34,12 +34,6 @@ module list export cyc=%CYC% export cycle=t%CYC%z -#### EMC developer only -COMPONENT=wave -export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf index 3818124625..9f30289093 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf @@ -34,12 +34,6 @@ module list export cyc=%CYC% export cycle=t%CYC%z -#### EMC developer only -COMPONENT=wave -export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -#### - ############################################################ # CALL executable job script here ############################################################ diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf index 9e4851d8d7..192f8cd98e 100755 --- 
a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf @@ -36,12 +36,6 @@ module list export cyc=%CYC% export cycle=t%CYC%z -#### EMC developer only -COMPONENT=wave -export COMIN=${COMIN:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-$(compath.py -o ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/$COMPONENT} -#### - ############################################################ # CALL executable job script here ############################################################ From f271963bfa7160598d5efb709eaac172d0b41140 Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Mon, 2 May 2022 09:58:18 -0500 Subject: [PATCH 13/33] Comment out p-dump usage for 2019 dates Comment out the if-block that sets DUMP_SUFFIX="p" when between 2019092100 and 2019110700. The p-dumps aren't available everywhere and should only be used optionally. May remove in future. Refs: #665 --- parm/config/config.base.emc.dyn | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/parm/config/config.base.emc.dyn b/parm/config/config.base.emc.dyn index 821bc52e31..5eb351a64a 100755 --- a/parm/config/config.base.emc.dyn +++ b/parm/config/config.base.emc.dyn @@ -100,9 +100,9 @@ export EXPDIR="@EXPDIR@/$PSLOT" export ROTDIR="@ROTDIR@/$PSLOT" export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work export DUMP_SUFFIX="" -if [[ "$CDATE" -ge "2019092100" && "$CDATE" -le "2019110700" ]]; then - export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel -fi +#if [[ "$CDATE" -ge "2019092100" && "$CDATE" -le "2019110700" ]]; then +# export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +#fi export RUNDIR="$STMP/RUNDIRS/$PSLOT" export DATAROOT="$RUNDIR/$CDATE/$CDUMP" export ARCDIR="$NOSCRUB/archive/$PSLOT" From 96bc75f04452e524b0422a56a165c1a2a5177311 Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Mon, 9 May 2022 11:42:36 -0500 Subject: [PATCH 
14/33] Add launcher_PREP to env files and cleanup HERA.env - add launcher_PREP="srun" to Hera and Orion env files - add launcher_PREP="mpiexec" to WCOSS2 env file - cleanup prep section of HERA.env to remove unneeded module unload and load of netcdf Refs: #665 --- env/HERA.env | 4 +--- env/ORION.env | 1 + env/WCOSS2.env | 1 + 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/env/HERA.env b/env/HERA.env index 98f76d2cf3..3e854ee849 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -37,9 +37,7 @@ if [ $step = "prep" -o $step = "prepbufr" ]; then export POE="NO" export BACK="NO" export sys_tp="HERA" - - module unload netcdfp/4.7.4 - module load netcdf/4.7.0 + export launcher_PREP="srun" elif [ $step = "waveinit" -o $step = "waveprep" -o $step = "wavepostsbs" -o $step = "wavepostbndpnt" -o $step = "wavepostbndpntbll" -o $step = "wavepostpnt" ]; then export mpmd="--multi-prog" diff --git a/env/ORION.env b/env/ORION.env index 3aadd8c36f..aa710d7918 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -41,6 +41,7 @@ if [ $step = "prep" -o $step = "prepbufr" ]; then export POE="NO" export BACK=${BACK:-"YES"} export sys_tp="ORION" + export launcher_PREP="srun" elif [ $step = "waveinit" -o $step = "waveprep" -o $step = "wavepostsbs" -o $step = "wavepostbndpnt" -o $step = "wavepostpnt" ]; then diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 3adfa5a84b..ab07823277 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -27,6 +27,7 @@ if [ $step = "prep" -o $step = "prepbufr" ]; then export POE=${POE:-"YES"} export BACK=${BACK:-"off"} export sys_tp="wcoss2" + export launcher_PREP="mpiexec" elif [ $step = "waveinit" -o $step = "waveprep" -o $step = "wavepostsbs" -o $step = "wavepostbndpnt" -o $step = "wavepostbndpntbll" -o $step = "wavepostpnt" ]; then From dd3100f07c856b065c171110f619d8491568ea9a Mon Sep 17 00:00:00 2001 From: KyleNevins-NOAA <98834025+KyleNevins-NOAA@users.noreply.github.com> Date: Thu, 12 May 2022 07:59:44 -0400 Subject: [PATCH 15/33] Add preliminary 
ecFlow Suite Generator (#777) Adds a preliminary version of an ecFlow suite generator in a new workflow_generator directory. The suite generator takes in a configuration as a YAML file and generates the appropriate ecFlow suite definition file, as well as copies the appropriate scripts into the correct structure as dictated by the suite. For full details on use, see the README in the workflow_generator directory. Refs #501 --- ecf/defs/GFSApp.def | 66 + workflow_generator/README.md | 232 ++ workflow_generator/__init.py__ | 0 workflow_generator/ecflow_build.yml | 68 + workflow_generator/ecflow_setup/__init.py__ | 0 .../ecflow_setup/ecflow_definitions.py | 2081 +++++++++++++++++ .../ecflow_setup/ecflow_setup.py | 653 ++++++ workflow_generator/prod.yml | 425 ++++ workflow_generator/setup_workflow.py | 114 + 9 files changed, 3639 insertions(+) create mode 100644 ecf/defs/GFSApp.def create mode 100644 workflow_generator/README.md create mode 100644 workflow_generator/__init.py__ create mode 100644 workflow_generator/ecflow_build.yml create mode 100644 workflow_generator/ecflow_setup/__init.py__ create mode 100644 workflow_generator/ecflow_setup/ecflow_definitions.py create mode 100644 workflow_generator/ecflow_setup/ecflow_setup.py create mode 100644 workflow_generator/prod.yml create mode 100644 workflow_generator/setup_workflow.py diff --git a/ecf/defs/GFSApp.def b/ecf/defs/GFSApp.def new file mode 100644 index 0000000000..c7c375ad68 --- /dev/null +++ b/ecf/defs/GFSApp.def @@ -0,0 +1,66 @@ +#### totality only +extern /task_limit:TOTALITY + +suite fcstonly + inlimit /task_limit:TOTALITY + edit PSLOT 'ecffo_c192' + edit PDY '20200906' + edit EDATE '20200907' + edit ECF_INCLUDE '/scratch1/NCEPDEV/global/Lin.Gan/git/global-workflow-develop/ecf/include' + edit ENVIR 'para' + edit MACHINE_SITE 'development' + edit CYC '00' + edit QUEUE 'batch' + edit QUEUE_SERVICE 'service' + edit PROJENVIR 'DEV' + edit HERA_ACCOUNT 'fv3-cpu' + edit EMC_USER 'Lin.Gan' + edit gfs_ver 
'v16.2.0' + edit PACKAGEHOME '/scratch1/NCEPDEV/global/%EMC_USER%/para/packages/gfs.%gfs_ver%' + edit ECF_FILES '%PACKAGEHOME%/ecf/scripts/gfs' + edit PROJ 'GFS' + family GFSApp + edit NET 'gfs' + family gfs + edit RUN 'gfs' + family atmos + task jgfs_getic + task jgfs_forecast + trigger ./atmos/jgfs_getic == complete + family post + task jgfs_atmos_post_manager + trigger ../../jgfs_forecast == active + event 1 release_post000 + event 2 release_post001 + event 3 release_post002 + event 4 release_post003 + task jgfs_atmos_post_f000 + trigger ./jgfs_atmos_post_manager:release_post000 + edit FHRGRP '001' + edit FHRLST 'f000' + edit FHR 'f000' + edit HR '000' + task jgfs_atmos_post_f001 + trigger ./jgfs_atmos_post_manager:release_post001 + edit FHRGRP '002' + edit FHRLST 'f001' + edit FHR 'f001' + edit HR '001' + task jgfs_atmos_post_f002 + trigger ./jgfs_atmos_post_manager:release_post002 + edit FHRGRP '003' + edit FHRLST 'f002' + edit FHR 'f002' + edit HR '002' + task jgfs_atmos_post_f003 + trigger ./jgfs_atmos_post_manager:release_post003 + edit FHRGRP '004' + edit FHRLST 'f003' + edit FHR 'f003' + edit HR '003' + endfamily # post + endfamily # atmos + endfamily # gfs + endfamily # GFSApp +endsuite + diff --git a/workflow_generator/README.md b/workflow_generator/README.md new file mode 100644 index 0000000000..6745404b99 --- /dev/null +++ b/workflow_generator/README.md @@ -0,0 +1,232 @@ +# Workflow Setup Utility: + +## Introduction +This utility is designed to be an automated ecFlow and Rocoto generation application, +used to create the folder structures and scripts needed to execute the workflows +for either application. As of April 2022, this application only works for ecFlow. + +### How ecFlow Setup Works +For ecFlow creation, the application takes a YAML file as input, pulls in any +environment variables that are specified in the YAML, then using the ecFlow +API, a definition file is created. 
Additionally, since ecFlow definition files +are dependent on folder structures, the application also identifies the scripts +associated with tasks and creates the folders for them, checks the script repository +folder and puts the scripts in their appropriate location. + +Please refer to the [setup the YAML](#configuring-the-yaml-file) section for instructions +on how to setup the YAML file for what you want. + +## Setup for using the utility with ecFlow +This utility uses Python3.6 and later. It will not work with Python anything before +Python3.6. + +### Pre-Requisites +In order to run the application the following Python3 modules need to be available: +* ecflow +* numpy +* PyYAML +These modules should be available on Hera and Orion. + +### Experiment Setup +This application requires the use of a config.base file. The location of the file +can be specified with the `--expdir` parameter. The file will be read in and +the ush/rocoto/workflow_utils.py script will be used to populate any environment +variables that are needed. + +### Required Environment Variables +If not setup within the script, the following environmnt variables are required: +* Account +* Queue +* machine +* RUN_ENVIR +These parameters are populated as 'edits' within the ecFlow definition file for +any of the suites that are created. + +An additional environment variable that is needed is: +* ECFgfs +This will be used as the base location for storing the suite scripts and also +used as the base location to look for the script repository. The application +assumes the default that the script repo is ECFgfs/scripts. Suggested edit is to +add the following to the config.base file: +* export ECFgfs=$HOMEgfs/ecf + +## Configuring the YAML file +The utility works primarily off of the yaml file used to define the suites, +families, and tasks. 
+ +### Script Repository +The scriptrepo can be specified either by an environment variable or by setting +a `scriptrepo: /path/to/scripts` variable in the YAML file at the top level. + +The script repository is the location the application will search for the ecf scripts to +deploy the to the correct location. + +### Setting up externs +To add externs, add the `externs:` label to the base level of the yaml file, +then add each extern as a yaml list object +* Example: + +```YAML +externs: +- "/prod18/enkfgdas/post" +- "/prod18/enkfgfs/post" +``` + +### Setting up a suite +To add a suite add the `suites:` label to the base level and then add the suite +names you want as dictionary objects: +* Example: + +```YAML +suites: + fcstonly: + fcstplus: +``` + +### Setting up families +Once a suite has been setup, add families as dictionary objects under the families. +Families can be dictionary objects under other families. In the example below the +fcstonly suite has the family gfs and the post family under that. The fcstplus +suite has the family gdas and the family atmos under that. +* Example + +```YAML +suites: + fcstonly: + gfs: + post: + fcstplus: + gdas: + atmos: +``` + +### Adding edits +Edits can be added to either families, tasks or suites by putting an `edit:` +dictionary tag and then listing the edits below. In the example below, the `RUN` +edit will be assigned the parameter `00` for the `gfs` family. +* Example + +```YAML +suites: + fcstonly: + gfs: + edits: + RUN: '00' +``` + +### Setting up the tasks +After the families are defined, tasks are defined by placing a `tasks:` dictionary +object under a family. Tasks are then put into dictionary objects. In the example below, the `gfs` family has an `atmos` family underneath it +with the `jgfs_forecast` task +* Example + +```YAML +gfs: + atmos: + tasks: + jgfs_forecast: +``` + +#### Task Options +Additional options are `triggers`, `events`, `edits`, and `defstatus`. 
+ +##### Triggers +To add a trigger to a task, add a `triggers:` heading underneath the task or family. The triggers need to be a list +item with the identifier for what you want the trigger to look for. So for a task, it would be `- task: task_name` or +for a family it would be `- family: family_name` + +Trigger list items can also have events, states, or suites as part of their configuration. Valid events must be listed +in the definition file previously. Valid states are complete, active, or queued. The suite must also be a defined suite +somewhere in the YAML file. + +* Example +```YAML +gfs: + edits: + RUN: 'gfs' + NET: 'gfs' + tasks: + jgfs_forecast: + triggers: + - task: jgfs_atmos_analysis + event: release_fcst + - task: jgfs_wave_prep +``` + +##### Events +To add events to a task, add an `events:` heading underneath the task. The items in the events is a list of strings. +* Example: This has two triggers and one event associated with it. +```YAML +analysis: + tasks: + jgfs_atmos_analysis: + triggers: + - task: jobsproc_gfs_atmos_prep + - task: jgfs_atmos_emcsfc_sfc_prep + events: + - "release_fcst" +``` + +##### Edits +To add edits to a task or family or suite, add an `edits:` heading underneath the node. The items go in dictionary +format such that `edit: edit_value` +* Example +```YAML +gfs: + edits: + RUN: 'gfs' + NET: 'gfs' +``` + +##### Defstatus +To add defstatus to a task or family, add a `defstatus:` parameter underneath the node. It will have a value associated +with it such that `defstatus: value` +* Example +```YAML +obsproc: + defstatus: complete +``` +### Ranges and Lists +Families or tasks, or even triggers and events can have ranges or lists associated with them to shorten the creation +of the definition YAML. The range notation uses the `( )` bracket syntax to specify and follows the same pattern as +Python ranges. If one value is specified, it assumes it is the max value, starts at 1, increments by 1 up to the value +specified. 
If two values are presented, then it uses the first as the initial starting value, increments by 1 up to the +max value specified. If all three, it uses them with the initial, increment, and max range values. If no value or +no max value is specified it uses what ever values are used in the parent counter. +* Example +```YAML +jgfs_atmos_post_f( 384 ): + template: jgfs_atmos_post_master + triggers: + - task: jgfs_atmos_post_manager + event: release_post( ) + edits: + FHRGRP: '( 1, )' + FHRLST: 'f( )' + FHR: 'f( )' + HR: '( )' +``` + +Lists are similar to the ranges but use the `[ ]` bracket syntax. Items in the list can be of any type and will run +the same way as ranges do +* Example +```YAML +post: + tasks: + jgfs_atmos_post_manager[ 1,2 ]: +``` +## Run the utility +1. Change into the workflow directory: +` cd global-workflow/workflow_generator` +1. Run the utility +``` +python3 setup_workflow.py --expdir ../parm/config +``` + +### Command Line Options +* --ecflow-config + * Path to the YAML configuration file to use to generate the definition and folder/scripts. +* --expdir + * Path to the experiment directory. Must contain config.base. +* --savedir + * Path to where the definition file will be saved. Defaults to current directory. 
\ No newline at end of file diff --git a/workflow_generator/__init.py__ b/workflow_generator/__init.py__ new file mode 100644 index 0000000000..e69de29bb2 diff --git a/workflow_generator/ecflow_build.yml b/workflow_generator/ecflow_build.yml new file mode 100644 index 0000000000..e58d71941f --- /dev/null +++ b/workflow_generator/ecflow_build.yml @@ -0,0 +1,68 @@ +--- +# scriptrepo: /path/to/scripts +externs: +- "/prod18/enkfgdas/post" +suites: + fcstonly: + edits: + CYC: env.ARCH_CYC + EDATE: env.EDATE + nodes: + GFSApp: + edits: + NET: 'gfs' + gfs: + repeat: "2022032400 to 2022042400 by 18:0" + edits: + RUN: 'gfs' + atmos: + tasks: + jgfs_forecast: + triggers: + - task: jgfs_getic + - task: jgfs_atmos_post_f( 2,1 ) + - task: jgfs_forecast + suite: fcstplus + jgfs_getic: + events: + - test_event + edits: + RUN: 'gfs' + post: + tasks: + jgfs_atmos_post_manager[ 1,2 ]: + template: jgfs_atmos_post_manager + triggers: + - task: jgfs_forecast + state: active + events: + - "release_post(4)f" + jgfs_atmos_post_f(2,env.FHMAX_GFS): + template: jgfs_atmos_post_master + triggers: + - task: jgfs_atmos_post_manager[1,2] + event: "release_post(1,2)f" + events: + - "test_event" + edits: + FHRGRP: "( 1,,6 )" + FHRGRP2: [ a,b,c,d ] + FHRLST: "f[ a,b,3,4 ] -testing" + FHR: "f(1,6,)" + HR: "( )" + TEST: 'Test Edit' + wave: + init: + tasks: + jgfs_wave_init: + jgfs_forecast: + fcstplus: + edits: + CYC: '06' + nodes: + nonGFSApp: + tasks: + jgfs_forecast: + triggers: + - task: jgfs_getic + suite: fcstonly diff --git a/workflow_generator/ecflow_setup/__init.py__ b/workflow_generator/ecflow_setup/__init.py__ new file mode 100644 index 0000000000..e69de29bb2 diff --git a/workflow_generator/ecflow_setup/ecflow_definitions.py b/workflow_generator/ecflow_setup/ecflow_definitions.py new file mode 100644 index 0000000000..622f866ec0 --- /dev/null +++ b/workflow_generator/ecflow_setup/ecflow_definitions.py @@ -0,0 +1,2081 @@ +#!/usr/bin/env python3 + +""" + PROGRAM: + Manage the ecflow 
definitions setup. The main purpose of this class is
+    to be called by the ecflow_setup.py module and create an Ecflowsuite
+    object for each suite. Then the processing for triggers, breaking
+    apart the list or loop strings and adding in the triggers and events
+    using the ecflow module to call things like ecflow.Task,
+    ecflow.Trigger, and so on.
+
+    This creates a dictionary object of each of the items it creates and
+    then uses the ecfsuite dict to reference so a task's trigger will
+    reference an already existing task and thereby automatically
+    populate the full path to the task as opposed to a more
+    dynamic reference.
+
+    At the bottom of this module are the custom objects created to extend
+    the ecflow classes for tasks, families,
+    etc.
+    AUTHOR:
+    Kyle Nevins
+    kyle.nevins@noaa.gov
+    FILE DEPENDENCIES:
+    None
+    OUTPUT:
+    None
+"""
+import sys
+import os
+import re
+import shutil
+from datetime import datetime, timedelta
+try:
+    import ecflow
+except ImportError as err:
+    raise Exception(f"Error: Could not import ecflow module: {err}")
+
+
+class Ecflowsuite:
+    """
+    This class is the representation of an ecflow suite. It manages all of the
+    items within using a dictionary. Names for the tasks are in the dictionary
+    by their name so all task names in a suite need to be unique otherwise
+    you'll have some issues.
+
+    Families in the dictionary are represented by the parent nodes combined
+    with the family name, so for example the family
+    gfs:
+      atmos:
+        post:
+    will be in the dictionary at the key gfs>atmos>post, referenced like so,
+    self.nodes[gfs>atmos>post]. That will contain all of the objects for
+    that family.
+
+    Attributes
+    ----------
+    ecfnodes : dict
+        Dictionary object that contains all the nodes within the suite. This
+        includes tasks, families, triggers, events, etc.
+    ecfhome : str
+        The path to the base for the ecf items. This includes the ecf scripts
+        repository and the storage location for all the suite script. 
In the + default, it is generally assumed to be the ecfGFS parameter from the + experiment setup. + build_tree : bool + A boolean that indicates if the application should build the folders + and scripts as part of the run. If this is false, the scripts and + folders are not created and assumed to already be in place. + ecfsuite : str + The name of the suite. + + Methods + ------- + add_suite(suite) + Creates the suite and if necessary creates the base folders. + + get_suite( ) + Get the ecfSuite object + + get_suite_name( ) + Returns the name of the suite + + get_task(task) + Returns a specific task from the suite. + + add_edit(edit_dict, parent=None) + Adds an edit to either a suite, task, or family. The parent defines + what object will get the edit object. + + add_event(event, parent=None) + Adds an event to the parent node. Events can only be associated with + families or tasks so if the parent is None, nothing will be added. + This was done to avoid errors. + + add_defstatus(defstatus, parent=None) + Adds an defstatus to the parent node. Defstatus objects can only be + associated with families or tasks so if the parent is None, nothing + will be added. This was done to avoid errors. + + add_repeat(repeat, parent=None) + Adds in a repeat to the parent node. Repeats can be parts of a family, + task, or suite. If the parent is none it will be added to the suite. + + add_trigger(trigger, parent, state=None, event=None, suite=None, + suite_array=None, operand=None) + Adds a trigger to the parent node. Triggers can be added to families + and tasks. + + add_family(family, parents=None) + Adds a family to the suite. If the parents value is set to none, then + it will be added as a top level family. Otherwise, it will be added as + a sub-family to the parents. + + add_task(task, parents, scriptrepo, template=None) + Adds a task to the parent node. 
If the build is set to true then the + method also calls the creation method in the ecfTask class to deploy + the script to the proper location. The script repo is where it will + look for the script. If template is set, it will look for that template + and then copy and change the name of the template at the destination to + the name of the task. + + add_task_edits(task, edit_dict) + Adds edits to a task. This takes in the edit_dict and then calls the + add_edit method to apply them to that task. + + add_task_repeat(task, repeat) + Adds a repeats to task nodes. This function primarily breaks down the + tasks into lists or ranges based on the task string and then adds the + repeat to the breakout. + + add_task_defstatus(task, defstatus) + Adds a defstatus to a task node. This function breaks down the task + string into a range or list if necessary and then adds the calls the + add_defstatus method. + + add_task_events(task, events) + Adds events to a task. This function breaks down the task string into + ranges or lists if necessary but also breaks down the events if those + are a list or range. It then passes the fully formed pieces to the + add_event method to add them to the suite. + + add_task_triggers(task, triggers, suite_array) + Adds triggers to a task. This is a fairly complex method and might be + able to be broken into smaller pieces at some point. The triggers + can be loops in themselves, based on a task with an event or a loop of + events. Or even a loop of other tasks from other suites. This function + breaks down the tasks themselves and then also any loop/list logic that + exists within the trigger and applies them to the task with the + add_trigger method. + """ + + def __init__(self, ecfsuite, ecfhome, build_tree=True): + """ + Parameters + ---------- + ecfhome : str + The path to the base for the ecf items. This includes the ecf + scripts repository and the storage location for all the suite + script. 
In the default, it is generally assumed to be the ecfGFS + parameter from the experiment setup. + build_tree : bool + A boolean that indicates if the application should build the + folders and scripts as part of the run. If this is false, the + scripts and folders are not created and assumed to already be + in place. + ecfsuite : str + The name of the suite. + """ + + # Initialize environment + self.ecfnodes = {} + self.ecfhome = ecfhome + self.build_tree = build_tree + + # Create initial suite + self.ecfsuite = self.add_suite(ecfsuite) + + def add_suite(self, suite): + """ + Creates the suite object and if necessary creates the base folders. + + Parameters + ---------- + suite : str + Name of the suite object. + + Returns + ------- + new_suite : ecfSuite object + An ecfSuite object + """ + + new_suite = ecfSuite(f"{suite}") + if self.build_tree: + new_suite.generate_folders(self.ecfhome) + return new_suite + + def get_suite(self): + """ + Get the ecfSuite object + + Returns + ------- + ecfSuite + The ecfsuite object that has all the contents + """ + + return self.ecfsuite + + def get_suite_name(self): + """ + Returns the name of the suite + + Returns + ------- + name : str + The name of the suite. + """ + + return self.ecfsuite.name() + + def get_task(self, task): + """ + Returns a specific task from the suite. + + Parameters + ---------- + task : str + The name of the task to lookup in the ecfnodes dictionary. + + Returns + ------- + ecfTask + An ecfTask that is an extension of the ecflow.task object. + """ + + return self.ecfnodes[task] + + def add_edit(self, edit_dict, parent=None): + """ + Adds an edit to either a suite, task, or family. The parent defines + what object will get the edit object. + + Parameters + ---------- + edit_dict : dict + Dictionary object that contains the edits in the form of + {"edit" : "value"} + parent : str + String for the parent node that will get the edits added. 
+ + Returns + ------- + None + """ + + if parent: + self.ecfnodes[parent] += ecflow.Edit(edit_dict) + else: + self.ecfsuite += ecflow.Edit(edit_dict) + + def add_event(self, event, parent=None): + """ + Adds an event to the parent node. Events can only be associated with + families or tasks so if the parent is None, nothing will be added. + This was done to avoid errors. + + Parameters + ---------- + event : str + A string that is passed to the ecflow.Event object + parent : str + String for the parent node that will get the events added. + + Returns + ------- + None + """ + + if parent: + self.ecfnodes[parent] += ecflow.Event(event) + + def add_defstatus(self, defstatus, parent=None): + """ + Adds an defstatus to the parent node. Defstatus objects can only be + associated with families or tasks so if the parent is None, nothing + will be added. This was done to avoid errors. + + Parameters + ---------- + defstatus : str + A string that is passed to the ecflow.Defstatus object + parent : str + String for the parent node that will get the defstatus added. + + Returns + ------- + None + """ + + if parent: + self.ecfnodes[parent] += ecflow.Defstatus(defstatus) + + def add_repeat(self, repeat, parent=None): + """ + Adds in a repeat to the parent node. Repeats can be parts of a family, + task, or suite. If the parent is none it will be added to the suite. + + This will calculate the difference between the two dates and use the + interval value from the third entry to identify how often. Due to the + fact that ecflow has a very simplistic time/date/interval + implementation, this function can render the dates in multiple + different fashions. + + If the start and end are the same day, it'll just use a time set. If + it is different days, it'll do a relative time set with the dates and + also a start time. If it is multiple dates it will throw in repeats + based on relative values. 
+ + Parameters + ---------- + repeat : str + This is a date string in the format of YYYYMMDDHH to YYYYMMDDHH by + DD:HH:MM. The hours on the second date string are optional as are + the day parameters in the time string. + parent : str + + Returns + ------- + None + """ + + repeat_token = re.search( + "(\d{8,10})( | to )(\d{10})( | by )(\d{1,2}:)?(\d{1,2}:\d{1,2})", + repeat) + start = repeat_token.group(1).strip() + end = repeat_token.group(3).strip() + byday = repeat_token.group(5).strip() if repeat_token.group(5) is not \ + None else repeat_token.group(5) + bytime = repeat_token.group(6).strip() + + startdate = datetime.strptime(start, "%Y%m%d%H") if len(start) == 10 \ + else datetime.strptime(start, "%Y%m%d") + enddate = datetime.strptime(end, "%Y%m%d%H") + if byday is not None: + delta = timedelta(days=int(byday.split(':')[0]), + hours=int(bytime.split(':')[0]), + minutes=int(bytime.split(':')[1])) + else: + delta = timedelta(hours=int(bytime.split(':')[0]), + minutes=int(bytime.split(':')[1])) + + total_runtime = enddate - startdate + + if parent: + targetnode = self.ecfnodes[parent] + else: + targetnode = self.ecfsuite + + try: + if total_runtime.total_seconds() < delta.total_seconds(): + raise ConfigurationError + except ConfigurationError: + if parent: + print(f"Node: {parent} - " + "Repeat has a greater increment than total time.") + else: + print(f"Suite: {self.get_suite_name()} - " + "Repeat has a greater increment than total time.") + sys.exit(1) + + # Setup the start date. + targetnode += ecflow.Date(f"{startdate.strftime('%d.%m.%Y')}") + + # If the dates are the same day, we only need a time string: + if startdate.date() == enddate.date(): + deltahours, deltaminutes = delta.seconds // 3600, delta.seconds // 60 % 60 + time_string = (f"{startdate.strftime('%H:%M')} " + f"{enddate.strftime('%H:%M')} " + f"{deltahours:02}:{deltaminutes:02}") + targetnode += ecflow.Time(time_string) + # If the days don't match up, we'll need to do some repeats. 
+ else: + deltahours, deltaminutes = delta.seconds // 3600, delta.seconds // 60 % 60 + if delta.total_seconds() < 86400: + position_time = startdate + total_instances = 0 + while position_time <= enddate: + total_instances += 1 + position_time = position_time + delta + if len(start) == 10: + targetnode += ecflow.Time(f"{startdate.strftime('%H:%M')}") + else: + targetnode += ecflow.Today(ecflow.TimeSlot(0, 0), True) + targetnode += ecflow.Time(deltahours, deltaminutes, True) + targetnode += ecflow.RepeatInteger("RUN", 1, total_instances) + else: + if deltahours == 0 and deltaminutes == 0: + position_time = startdate + delta + if len(start) == 10: + targetnode += ecflow.Time(f"{startdate.strftime('%H:%M')}") + else: + targetnode += ecflow.Time(00, 00, True) + while position_time <= enddate: + position_string = f"{position_time.strftime('%d.%m.%Y')}" + targetnode += ecflow.Date(position_string) + position_time = position_time + delta + else: + position_time = startdate + while position_time <= enddate: + targetnode += ecflow.Cron(position_time.strftime('%H:%M'), + days_of_month=[int(position_time.strftime('%d'))], + months=[int(position_time.strftime('%m'))]) + position_time = position_time + delta + + def add_trigger(self, trigger, parent, state=None, event=None, suite=None, + suite_array=None, operand=None): + """ + Adds a trigger to the parent node. Triggers can be added to families + and tasks. + + Parameters + ---------- + trigger : str + The trigger string to add to the parent node. + parent : str + The parent node that will accept the trigger + state : str + The state of the trigger. Generally looking for complete, active, + or queued. + event : str + If there is an event associated with a task, this will add it to + the trigger definition. + suite : str + If the trigger is looking outside the current suite, this will + pull in the details from the other suites and attach the trigger. 
+ suite_array : dict + This is the array of suites in the event that the suite value is + populated, the details of the suite need to be made available to + the function + operand : bool + This is a true/false value that is looking to define if the trigger + is an AND or an OR. If it is TRUE it is an AND, if it is FALSE, it + is an OR. + + Returns + ------- + None + """ + + if suite is not None: + try: + trigger_path = suite_array[suite].get_task(trigger).get_abs_node_path() + if state is None and event is None: + add_trigger = ecflow.Trigger(f"{trigger_path} == complete") + elif state is not None and event is None: + add_trigger = ecflow.Trigger(f"{trigger_path} == {state}") + elif state is None and event is not None: + add_trigger = ecflow.Trigger(f"{trigger_path}:{event}") + except KeyError as e: + print(f"Suite {suite} for task/trigger {parent}/{trigger}" + " is not available. Please check the configuration file.") + print("Error {e}") + sys.exit(1) + else: + try: + if state is None and event is None: + add_trigger = ecflow.Trigger([self.ecfnodes[trigger]]) + elif state is not None and event is None: + trigger_path = self.ecfnodes[trigger].get_abs_node_path() + add_trigger = ecflow.Trigger(f"{trigger_path} == {state}") + elif state is None and event is not None: + trigger_path = self.ecfnodes[trigger].get_abs_node_path() + add_trigger = ecflow.Trigger(f"{trigger_path}:{event}") + except KeyError as e: + print(f"The node/trigger {parent}/{trigger} is not available " + f"in suite {self.get_suite_name()}." + " Please check the configuration file.") + print(f"Error {e}") + sys.exit(1) + if operand is not None: + add_trigger = ecflow.Trigger(add_trigger.get_expression(), operand) + self.ecfnodes[parent].add(add_trigger) + + def add_family(self, family, parents=None): + """ + Adds a family to the suite. If the parents value is set to none, then + it will be added as a top level family. Otherwise, it will be added as + a sub-family to the parents. 
+ + Parameters + ---------- + family : str + The name of the family that is to be added to the suite. + parents : str + The string representation of the parent nodes that the family needs + to be added to. + + Returns + ------- + None + """ + + family_name = f"{parents}>{family}" if parents else family + + # If the name already exists, the family already exists + if family_name not in self.ecfnodes.keys(): + self.ecfnodes[family_name] = ecfFamily(family) + if self.build_tree: + self.ecfnodes[family_name].generate_folders(self.ecfhome, self.get_suite_name(), parents) + + if parents: + self.ecfnodes[parents] += self.ecfnodes[family_name] + else: + self.ecfsuite += self.ecfnodes[family_name] + + def add_task(self, task, parents, scriptrepo, template=None): + """ + Adds a task to the parent node. If the build is set to true then the + method also calls the creation method in the ecfTask class to deploy + the script to the proper location. The script repo is where it will + look for the script. If template is set, it will look for that template + and then copy and change the name of the template at the destination to + the name of the task. + + Parameters + ---------- + task : str + The name of the task + parents : str + The name of the parent nodes to get the task + scriptrepo : str + File path to the script repository to look for the task.ecf scripts + template : str + Name of the template file to use instead of searching for the name + of the task in the script repo. 
+ + Returns + ------- + None + """ + + task_node = ecfTaskNode(task) + if task_node.is_loop() or task_node.is_list: + for task_number in task_node.get_range(): + task_name = f"{task_node.get_full_name(task_number)}" + if task_name not in self.ecfnodes.keys(): + self.ecfnodes[task_name] = ecfTask(task_name) + self.ecfnodes[task_name].setup_script(scriptrepo, template) + if self.build_tree: + self.ecfnodes[task_name].generate_ecflow_task(self.ecfhome, self.get_suite_name(), parents) + self.ecfnodes[parents] += self.ecfnodes[task_name] + else: + if task not in self.ecfnodes.keys(): + self.ecfnodes[task] = ecfTask(task) + self.ecfnodes[task].setup_script(scriptrepo, template) + if self.build_tree: + self.ecfnodes[task].generate_ecflow_task(self.ecfhome, self.get_suite_name(), parents) + self.ecfnodes[parents] += self.ecfnodes[task] + + def add_task_edits(self, task, edit_dict): + """ + Adds edits to a task. This takes in the edit_dict and then calls the + add_edit method to apply them to that task. + + This function also breaks apart any lists or ranges that are passed in + to the tasks and applies it to all of them. It also applies any loop + logic that is applied to the parent task to the edits themselves. + + Parameters + ---------- + task : str + The name of the task. Can also include a list or range object in + the string. + edit_dict : dict + A dictionary of the edits that are to be applied to the tasks. 
+ + Returns + ------- + None + """ + + task_node = ecfTaskNode(task) + if task_node.is_loop() or task_node.is_list: + loop_index = 0 + for task_number in task_node.get_range(): + task_name = f"{task_node.get_full_name(task_number)}" + for edit, editvalue in edit_dict.items(): + editNodeValue = ecfEditNode(editvalue) + if editNodeValue.is_loop(): + if editNodeValue.use_parent_counter: + neweditvalue = f"{editNodeValue.get_full_name(task_number)}" + else: + total_tasks=len(task_node.get_range()) + edit_range = editNodeValue.get_range(max_value=total_tasks) + edit_count = [*edit_range] + neweditvalue = f"{editNodeValue.get_full_name(edit_count[loop_index])}" + elif editNodeValue.is_list: + try: + if len(editNodeValue.items) == len(task_node.get_range()): + neweditvalue = f"{editNodeValue.get_full_name(loop_index)}" + else: + raise ConfigurationError + except ConfigurationError: + print(f"The listed array of {edit} " + "exceeds the parent counter." + " Please check the configuration file") + sys.exit(1) + else: + neweditvalue = editvalue + self.add_edit({edit: neweditvalue}, task_name) + loop_index += 1 + else: + for edit in edit_dict: + self.add_edit({edit: edit_dict[edit]}, task) + + def add_task_repeat(self, task, repeat): + """ + Adds a repeats to task nodes. This function primarily breaks down the + tasks into lists or ranges based on the task string and then adds the + repeat to the breakout. + + Parameters + ---------- + task : str + The name of the task or list/range of tasks to add the repeat. + repeat : str + The repeat string to be passed to the add_repeat method. + + Returns + ------- + None + """ + + task_node = ecfTaskNode(task) + if task_node.is_loop() or task_node.is_list: + for task_number in task_node.get_range(): + task_name = f"{task_node.get_full_name(task_number)}" + self.add_repeat(repeat, task_name) + else: + self.add_repeat(repeat, task) + + def add_task_defstatus(self, task, defstatus): + """ + Adds a defstatus to a task node. 
This function breaks down the task + string into a range or list if necessary and then adds the calls the + add_defstatus method. + + Parameters + ---------- + task : str + The task string to add the defstatus pieces to. Can be a range or + list as well. + defstatus : str + String that represents the defstatus, like complete. + + Returns + ------- + None + """ + + task_node = ecfTaskNode(task) + if task_node.is_loop() or task_node.is_list: + for task_number in task_node.get_range(): + task_name = f"{task_node.get_full_name(task_number)}" + self.add_defstatus(defstatus, task_name) + else: + self.add_defstatus(defstatus, task) + + def add_task_events(self, task, events): + """ + Adds events to a task. This function breaks down the task string into + ranges or lists if necessary but also breaks down the events if those + are a list or range. It then passes the fully formed pieces to the + add_event method to add them to the suite. + + Parameters + ---------- + task : str + The task string to add the event to. + events : str + The events string that will be added to the task. 
+ + Returns + ------- + None + """ + + task_node = ecfTaskNode(task) + if task_node.is_loop() or task_node.is_list: + for task_number in task_node.get_range(): + loop_index = 0 + task_name = f"{task_node.get_full_name(task_number)}" + for event in events: + eventNode = ecfEventNode(event) + if eventNode.is_loop(): + if eventNode.use_parent_counter: + event_name = f"{eventNode.get_full_name(task_number)}" + self.add_event(event_name, task_name) + else: + event_counter = eventNode.get_range() + for event_number in event_counter: + event_name = f"{eventNode.get_full_name(event_number)}" + self.add_event(event_name, task_name) + elif eventNode.is_list: + try: + if len(eventNode.items) == len(task_node.get_range()): + event_name = f"{eventNode.get_full_name(loop_index)}" + self.add_event(event_name, task_name) + else: + raise ConfigurationError + except ConfigurationError: + print(f"The listed array of {eventNode.get_name()} " + "exceeds the parent counter." + " Please check the configuration file") + sys.exit(1) + + else: + self.add_event(event, task_name) + loop_index += 1 + else: + for event in events: + eventNode = ecfEventNode(event) + if eventNode.is_loop(): + for event_number in eventNode.get_range(): + event_name = f"{eventNode.get_full_name(event_number)}" + self.add_event(event_name, task) + else: + self.add_event(event, task) + + def add_task_triggers(self, task, triggers, suite_array): + """ + Adds triggers to a task. This is a fairly complex method and might be + able to be broken into smaller pieces at some point. The triggers + can be loops in themselves, based on a task with an event or a loop of + events. Or even a loop of other tasks from other suites. This function + breaks down the tasks themselves and then also any loop/list logic that + exists within the trigger and applies them to the task with the + add_trigger method. 
+
+        Parameters
+        ----------
+        task : str
+            The task string, list, range or static, that is to be broken down
+            and then the triggers applied.
+        triggers : dict
+            The dictionary of triggers to add to the task.
+        suite_array : dict
+            In case the triggers are from another suite, this calls the trigger
+            from the other suite.
+
+        Methods
+        -------
+        process_trigger(trigger_name, triggerTaskNode, task,
+                        task_loop_index=None, total_tasks=None,
+                        task_number=None)
+            Since processing the triggers for each of the tasks has a lot of
+            repetitive breakdowns, it is useful to create a method within
+            the add_task_trigger method to reduce the repetitive code.
+
+        Returns
+        -------
+        None
+        """
+
+        def process_trigger(trigger_name, triggerTaskNode, task,
+                            task_loop_index=None, total_tasks=None,
+                            task_number=None):
+            """
+            Since processing the triggers for each of the tasks has a lot of
+            repetitive breakdowns, it is useful to create a method within
+            the add_task_trigger method to reduce the repetitive code.
+
+            Parameters
+            ----------
+            trigger_name : str
+                The name of the trigger
+            triggerTaskNode : ecfTaskNode
+                This is the full object of the task. Needed because the
+                methods of that class allow this method to identify if there
+                are any events, loops, etc.
+            task : str
+                The task to add the triggers to.
+            task_loop_index : int
+                If the parent task is a list/loop, this is the current index
+                value so it can be a 1 to 1 match. I.E. the second value of the
+                list in the task uses the second value of the list in the
+                trigger.
+            total_tasks : int
+                This is used in the event that the task is a range and can be
+                used with the range function in the ecfTaskNode class to get
+                the appropriate values
+            task_number : int
+                The task ID number to get the full name of the task in the
+                event that the task is a list.
+ + Returns + ------- + None + """ + + if triggerTaskNode.has_suite(): + suite = triggerTaskNode.get_suite() + else: + suite = None + operand = None + if triggerTaskNode.has_operand(): + operand = bool(triggerTaskNode.get_operand()) + if triggerTaskNode.has_state(): + self.add_trigger(trigger_name, task, state=triggerTaskNode.get_state(), suite=suite, + suite_array=suite_array, operand=operand) + elif triggerTaskNode.has_event(): + if triggerTaskNode.is_event_loop(): + if triggerTaskNode.has_event_max_value(): + for event_count in triggerTaskNode.get_event_range(): + event_name = f"{triggerTaskNode.get_event_full_name(event_count)}" + self.add_trigger(trigger_name, task, event=event_name, suite=suite, + suite_array=suite_array, operand=operand) + elif triggerTaskNode.event_parent_counter: + event_name = f"{triggerTaskNode.get_event_full_name(task_number)}" + self.add_trigger(trigger_name, task, event=event_name, suite=suite, + suite_array=suite_array, operand=operand) + else: + event_range = triggerTaskNode.get_event_range(max_value=total_tasks) + event_count = [*event_range] + event_name = f"{triggerTaskNode.get_event_full_name(event_count[task_loop_index])}" + self.add_trigger(trigger_name, task, event=event_name, suite=suite, + suite_array=suite_array, operand=operand) + else: + self.add_trigger(trigger_name, task, event=triggerTaskNode.get_event(), suite=suite, + suite_array=suite_array, operand=operand) + else: + self.add_trigger(trigger_name, task, suite=suite, + suite_array=suite_array, operand=operand) + + task_node = ecfTaskNode(task) + if task_node.is_loop() or task_node.is_list: + task_loop_index = 0 + for task_number in task_node.get_range(): + task_name = f"{task_node.get_full_name(task_number)}" + total_tasks = len(task_node.get_range()) + for trigger in triggers: + triggerTaskNode = ecfTriggerNode(trigger) + if triggerTaskNode.is_loop() or triggerTaskNode.is_list: + if triggerTaskNode.is_list or triggerTaskNode.has_max_value(): + for trigger_flag in 
class ecfNode():
    """
    Base class for the node-type objects (tasks, triggers, events, edits).

    It parses the configured item to determine whether the node is a plain
    name, a loop (the ``name( initial, max, increment )`` syntax), or a
    list (the ``name[ a, b, c ]`` syntax), and exposes helpers to expand
    the name for each counter value.

    Attributes
    ----------
    name : str
        Name of the object.
    is_list : bool
        True when the node uses the ``[ ]`` list syntax.
    items : list
        The parsed list entries when is_list is True.
    initial_count : int or None
        Start value when the node defines a range.
    increment : int or None
        Step value when the node defines a range.
    max_value : int or None
        End value when the node defines a range.
    use_parent_counter : bool
        True when the node uses loop syntax with no inner values,
        indicating that it should use the range of the parent node.
    """

    def __init__(self, ecfItem):
        """
        Parameters
        ----------
        ecfItem : str or list
            Name of the ecfNode item. If it contains a range or list
            identifier, the other attributes are populated to identify
            what kind of node it is.
        """

        # Fix: initialize every attribute up front so the query methods
        # (has_max_value, get_range, ...) never raise AttributeError,
        # even when is_loop() has not been called first.
        self.initial_count = None
        self.increment = None
        self.max_value = None
        self.use_parent_counter = False
        self.is_list = False
        self.items = []

        if isinstance(ecfItem, str):
            if re.search(r".*\(.*\).*", ecfItem):
                # Range syntax; the tokens are parsed later by is_loop().
                self.name = ecfItem
            elif re.search(r".*\[.*\].*", ecfItem):
                self.name = ecfItem
                self.is_list = True
                self.items = re.search(r".*\[(.*)\].*", self.name).group(1).strip().split(',')
            else:
                self.name = ecfItem
        elif isinstance(ecfItem, list):
            self.name = ''
            self.is_list = True
            self.items = ecfItem
        else:
            self.name = ecfItem

    def get_name(self):
        """
        Returns the name of the node as a string.
        """

        return self.name

    def is_loop(self):
        """
        Checks to see if the ecfNode is a loop, i.e. uses the ``( )``
        syntax. When it is, the tokens inside the parentheses are parsed
        and the initial/increment/max attributes are populated: one token
        sets the max value, two set initial and max, three set initial,
        max and increment.

        Returns
        -------
        bool
            True if the node is a loop format defined by ( ).
        """

        range_functions = {
            1: self.set_max_value,
            2: self.set_initial_max_value,
            3: self.set_initial_increment_max_value,
        }
        if re.search(r".*\(.*\).*", self.name):
            self.use_parent_counter = False
            range_token = re.search(r".*\((.*)\).*", self.name).group(1).strip().split(',')
            range_functions.get(len(range_token), self.invalid_range)(range_token)
            return True
        return False

    def invalid_range(self, range_token=None):
        """
        Helper function to ensure that the range is valid. Exits if it is
        not.

        Parameters
        ----------
        range_token : list, optional
            The offending tokens. Fix: the is_loop() dispatch table calls
            its fallback with the token list, but the original signature
            took no argument, so an out-of-bounds range raised TypeError
            instead of printing this message.
        """

        print(f"The range specified in {self.name} is out of bounds. "
              "Please review the configuration.")
        sys.exit(1)

    def set_max_value(self, range_token):
        """
        Parse a single-token range: only the max value is supplied; the
        initial count and increment fall back to their defaults.

        Parameters
        ----------
        range_token : list
            The range tokens from the is_loop method.
        """

        self.initial_count = None
        self.increment = None
        if not range_token[0]:
            # Empty parens: defer to the parent node's counter.
            self.max_value = None
            self.use_parent_counter = True
        else:
            try:
                # Fix: int() raises ValueError (not TypeError) for a
                # non-numeric string, so catch both.
                self.max_value = int(range_token[0])
            except (TypeError, ValueError):
                print(f"Maximum value for {self.name} is not an integer")
                sys.exit(1)

    def set_initial_max_value(self, range_token):
        """
        Parse a two-token range: the initial count and the max value. The
        increment falls back to its default.

        Parameters
        ----------
        range_token : list
            The range tokens from the is_loop method.
        """

        try:
            self.initial_count = None if not range_token[0] else int(range_token[0])
        except (TypeError, ValueError):
            print(f"Initial count value for {self.name} is not an integer")
            sys.exit(1)
        self.increment = None
        if not range_token[1]:
            self.max_value = None
        else:
            try:
                self.max_value = int(range_token[1])
            except (TypeError, ValueError):
                print(f"Maximum value for {self.name} is not an integer")
                sys.exit(1)

    def set_initial_increment_max_value(self, range_token):
        """
        Parse a three-token range: initial count, max value and increment.

        Parameters
        ----------
        range_token : list
            The range tokens from the is_loop method.
        """

        try:
            self.initial_count = None if not range_token[0] else int(range_token[0])
            self.increment = None if not range_token[2] else int(range_token[2])
        except (TypeError, ValueError):
            print(f"Initial count and increment values for {self.name} "
                  "are not integers")
            sys.exit(1)
        if not range_token[1]:
            self.max_value = None
        else:
            try:
                self.max_value = int(range_token[1])
            except (TypeError, ValueError):
                print(f"Maximum value for {self.name} is not an integer")
                sys.exit(1)

    def get_full_name(self, counter=None):
        """
        Expand the node name for a given counter value.

        Parameters
        ----------
        counter : int or str, optional
            For range nodes an int is zero-padded to three digits and
            substituted for the ``( )`` marker; a str is substituted
            verbatim. For list nodes the counter indexes into the parsed
            items. With no loop or list syntax the plain name is returned.

        Returns
        -------
        str
            The expanded node name.
        """

        try:
            if re.search(r"\(.*\)", self.name):
                name_token = re.search(r"(.*)\(.*\)(.*)", self.name)
                base = name_token.group(1).strip()
                suffix = name_token.group(2).strip()
                if isinstance(counter, int):
                    return f"{base}{counter:03}{suffix}"
                elif isinstance(counter, str):
                    return f"{base}{counter}{suffix}"
            elif re.search(r"\[.*\]", self.name):
                name_token = re.search(r"(.*)\[.*\](.*)", self.name)
                base = name_token.group(1).strip()
                suffix = name_token.group(2).strip()
                array_item = self.items[counter]
                if isinstance(array_item, int):
                    return f"{base}{array_item:03}{suffix}"
                elif isinstance(array_item, str):
                    return f"{base}{array_item}{suffix}"
            elif self.is_list:
                array_item = self.items[counter]
                if isinstance(array_item, int):
                    return f"{array_item:03}"
                elif isinstance(array_item, str):
                    return f"{array_item}"
            else:
                return self.name
        except (ValueError, IndexError) as err:
            # Also trap a bad list index so a malformed counter surfaces
            # as a readable message rather than an unhandled exception.
            print(f"Problem getting full name of {self.name}. Error: {err}")

    def has_max_value(self):
        """
        Returns True when the node's parsed range defines a maximum value.
        """

        return self.max_value is not None

    def get_max_value(self):
        """
        Returns the maximum value parsed from the range definition.
        """

        return self.max_value

    def get_range(self, initial_count=0, increment=1, max_value=1):
        """
        Return the counter range for a loop or list node.

        Parameters
        ----------
        initial_count : int
            Fallback start value in case the node did not define one.
        increment : int
            Fallback step value in case the node did not define one.
        max_value : int
            Fallback count in case the node did not define one.

        Returns
        -------
        range
            For a list node, the index range over the items; otherwise a
            range built from the parsed values, where max_value acts as an
            iteration count starting at initial_count.
        """

        if self.is_list:
            return range(initial_count, len(self.items), increment)
        if self.initial_count is not None:
            initial_count = self.initial_count
        if self.increment is not None:
            increment = self.increment
        if self.max_value is not None:
            max_value = self.max_value
        max_value = (max_value * increment) + initial_count
        return range(initial_count, max_value, increment)


class ecfTaskNode(ecfNode):
    """
    Extension class for the ecfNodes to identify tasks.
    """

    def get_type(self):
        """
        Returns the node type identifier, always the string 'task'.
        """

        return 'task'
class ecfTriggerNode(ecfNode):
    """
    Extension class for the ecfNodes to identify triggers. Overloads the
    constructor since triggers are configured as dictionaries that can
    carry extra keys (suite, state, operand, event) in addition to the
    task name, and events attached to a trigger can themselves be loops
    or lists.

    Attributes
    ----------
    task_setup : dict
        The raw trigger configuration from the YAML file.
    name : str
        The target task name.
    is_list : bool
        True when the task entry uses list syntax.
    items : list
        Parsed task list entries when is_list is True.
    use_parent_counter : bool
        True when the node should use the range of the parent node.
    """

    def __init__(self, ecfItem):
        """
        Parameters
        ----------
        ecfItem : dict
            Trigger configuration; the 'task' key holds the target task
            name (a plain string, a loop/list string, or a list).
        """

        self.task_setup = ecfItem
        # Initialize the classification attributes so inherited query
        # methods never hit an AttributeError.
        self.initial_count = None
        self.increment = None
        self.max_value = None
        self.use_parent_counter = False
        self.is_list = False
        self.items = []

        task = ecfItem['task']
        if isinstance(task, str):
            self.name = task
            if re.search(r".*\[.*\].*", task):
                self.is_list = True
                self.items = re.search(r".*\[(.*)\].*", task).group(1).strip().split(',')
        elif isinstance(task, list):
            # Fix: the original tested isinstance(ecfItem, list), which can
            # never be true for the dict passed in; the intent is to detect
            # a list-valued 'task' entry.
            self.name = ''
            self.is_list = True
            self.items = task
        else:
            self.name = task

    def get_type(self):
        """
        Returns the node type identifier, always the string 'trigger'.
        """

        return 'trigger'

    def has_operand(self):
        """
        If the trigger has an operand to indicate whether it is joined to
        the trigger statement with OR or AND, set the operand attribute
        and return True, otherwise False.

        Returns
        -------
        bool
            True if there is an operand associated with the trigger.
        """

        if 'operand' in self.task_setup.keys():
            self.operand = self.task_setup['operand']
            return True
        return False

    def get_operand(self):
        """
        Returns the operand set by has_operand().
        """

        return self.operand

    def get_state(self):
        """
        Returns the state set by has_state().
        """

        return self.state

    def get_event(self):
        """
        Returns the event string set by has_event().
        """

        return self.event_string

    def has_suite(self):
        """
        If a suite was passed in as part of the trigger parameters, set
        the suite attribute and return True, otherwise False.

        Returns
        -------
        bool
            True if there is a suite associated with the trigger.
        """

        if 'suite' in self.task_setup.keys():
            self.suite = self.task_setup['suite']
            return True
        return False

    def get_suite(self):
        """
        Returns the suite name set by has_suite().
        """

        return self.suite

    def has_state(self):
        """
        If a state was passed in with the YAML parameters, set the state
        attribute and return True, otherwise False.

        Returns
        -------
        bool
            True if there is a state value in the keys.
        """

        if 'state' in self.task_setup.keys():
            self.state = self.task_setup['state']
            return True
        return False

    def has_event(self):
        """
        Determine whether the trigger has an event and classify it: the
        event can be a plain string, a loop/list string, or a YAML list.
        The event_string / event_items / is_event_list attributes are
        populated so the other event helpers can use them later.

        Returns
        -------
        bool
            True when the trigger has an event configured.
        """

        if 'event' not in self.task_setup.keys():
            return False
        event = self.task_setup['event']
        self.is_event_list = False
        if isinstance(event, str):
            self.event_string = event
            if re.search(r".*\[.*\].*", event):
                self.is_event_list = True
                # Fix: the original stored the parsed entries in
                # self.items, clobbering the task list and leaving
                # event_items (read by get_event_full_name/get_event_range)
                # unset.
                self.event_items = re.search(r".*\[(.*)\].*", event).group(1).strip().split(',')
        elif isinstance(event, list):
            self.is_event_list = True
            self.event_items = event
        else:
            self.event_string = event
        return True

    def is_event_loop(self):
        """
        If the event attached to the trigger uses the loop syntax ``( )``,
        parse the event range values and return True, otherwise False.

        Returns
        -------
        bool
            True if the event is a loop definition.
        """

        range_functions = {
            1: self.set_event_max_value,
            2: self.set_event_initial_max_value,
            3: self.set_event_initial_increment_max_value,
        }
        if re.search(r"\(.*\)", self.event_string):
            self.event_parent_counter = False
            range_token = re.search(r".*\((.*)\).*", self.event_string).group(1).strip().split(',')
            # Fix: the original fell back to self.invalid_range, leaving
            # invalid_event_range dead code.
            range_functions.get(len(range_token), self.invalid_event_range)(range_token)
            return True
        return False

    def invalid_event_range(self, range_token=None):
        """
        Helper method to exit the application if the event range is
        invalid.

        Parameters
        ----------
        range_token : list, optional
            The offending tokens; accepted so this method can be used as
            the fallback in the is_event_loop() dispatch table.
        """

        print(f"The range specified in {self.name} is out of bounds. "
              "Please review the configuration.")
        sys.exit(1)

    def set_event_max_value(self, range_token):
        """
        Parse a single-token event range: only the max value is supplied;
        the initial count and increment fall back to their defaults.

        Parameters
        ----------
        range_token : list
            The range tokens from the is_event_loop method.
        """

        self.event_initial_count = None
        self.event_increment = None
        if not range_token[0]:
            self.event_max_value = None
            self.event_parent_counter = True
        else:
            try:
                # int() raises ValueError for non-numeric strings.
                self.event_max_value = int(range_token[0])
            except (TypeError, ValueError):
                print(f"Maximum value for {self.event_string} is not an integer")
                sys.exit(1)

    def set_event_initial_max_value(self, range_token):
        """
        Parse a two-token event range: the initial count and the max
        value; the increment falls back to its default.

        Parameters
        ----------
        range_token : list
            The range tokens from the is_event_loop method.
        """

        try:
            self.event_initial_count = None if not range_token[0] else int(range_token[0])
        except (TypeError, ValueError):
            print(f"Initial value for {self.event_string} is not an integer")
            sys.exit(1)
        self.event_increment = None
        if not range_token[1]:
            self.event_max_value = None
        else:
            try:
                self.event_max_value = int(range_token[1])
            except (TypeError, ValueError):
                print(f"Maximum value for {self.event_string} is not an integer")
                sys.exit(1)

    def set_event_initial_increment_max_value(self, range_token):
        """
        Parse a three-token event range: initial count, max value and
        increment.

        Parameters
        ----------
        range_token : list
            The range tokens from the is_event_loop method.
        """

        try:
            self.event_initial_count = None if not range_token[0] else int(range_token[0])
            self.event_increment = None if not range_token[2] else int(range_token[2])
        except (TypeError, ValueError):
            # Fix: corrected the 'cound' typo in the original message.
            print(f"Initial count and increment values for "
                  f"{self.event_string} are not integers")
            sys.exit(1)
        if not range_token[1]:
            self.event_max_value = None
        else:
            try:
                self.event_max_value = int(range_token[1])
            except (TypeError, ValueError):
                print(f"Maximum value for {self.event_string} "
                      "is not an integer")
                sys.exit(1)

    def get_event_full_name(self, counter=None):
        """
        Expand the event string for a given counter value, mirroring
        ecfNode.get_full_name() but operating on the event definition.

        Parameters
        ----------
        counter : int or str, optional
            Range value (ints are zero-padded to three digits) or list
            index, depending on the event syntax.

        Returns
        -------
        str
            The expanded event name.
        """

        try:
            if re.search(r"\(.*\)", self.event_string):
                name_token = re.search(r"(.*)\(.*\)(.*)", self.event_string)
                base = name_token.group(1).strip()
                suffix = name_token.group(2).strip()
                if isinstance(counter, int):
                    return f"{base}{counter:03}{suffix}"
                elif isinstance(counter, str):
                    return f"{base}{counter}{suffix}"
            elif re.search(r"\[.*\]", self.event_string):
                name_token = re.search(r"(.*)\[.*\](.*)", self.event_string)
                base = name_token.group(1).strip()
                suffix = name_token.group(2).strip()
                array_item = self.event_items[counter]
                if isinstance(array_item, int):
                    return f"{base}{array_item:03}{suffix}"
                elif isinstance(array_item, str):
                    return f"{base}{array_item}{suffix}"
            elif self.is_event_list:
                # Fix: the original checked self.is_list (the task flag)
                # here while indexing event_items.
                array_item = self.event_items[counter]
                if isinstance(array_item, int):
                    return f"{array_item:03}"
                elif isinstance(array_item, str):
                    return f"{array_item}"
            else:
                return self.event_string
        except (ValueError, IndexError) as err:
            # Fix: the second half of this message was missing the f
            # prefix, so the literal text '{err}' was printed.
            print(f"Problem getting full name of {self.event_string}. "
                  f"Error: {err}")

    def has_event_max_value(self):
        """
        Returns True when the parsed event range defines a maximum value.
        """

        return self.event_max_value is not None

    def get_event_max_value(self):
        """
        Returns the maximum value parsed from the event range.
        """

        return self.event_max_value

    def get_event_range(self, initial_count=0, increment=1, max_value=1):
        """
        Return the counter range for the trigger's event.

        Parameters
        ----------
        initial_count : int
            Fallback start value in case the event did not define one.
        increment : int
            Fallback step value in case the event did not define one.
        max_value : int
            Fallback count in case the event did not define one.

        Returns
        -------
        range
            For a list event, the index range over the items; otherwise a
            range built from the parsed event values, where max_value acts
            as an iteration count starting at initial_count.
        """

        if self.is_event_list:
            return range(initial_count, len(self.event_items), increment)
        if self.event_initial_count is not None:
            initial_count = self.event_initial_count
        if self.event_increment is not None:
            increment = self.event_increment
        if self.event_max_value is not None:
            max_value = self.event_max_value
        max_value = (max_value * increment) + initial_count
        return range(initial_count, max_value, increment)


class ecfEventNode(ecfNode):
    """
    Extension class for the ecfNodes to identify events.
    """

    def get_type(self):
        """
        Returns the node type identifier, always the string 'event'.
        """

        return 'event'


class ecfEditNode(ecfNode):
    """
    Extension class for the ecfNodes to identify edits.
    """

    def get_type(self):
        """
        Returns the node type identifier, always the string 'edit'.
        """

        return 'edit'
class ecfRoot():
    """
    A root level class that is not an ecfNode object from above but an
    object that will extend a class from the ecflow module, holding
    helpers shared by suites, families and tasks.
    """

    def get_base_name(self):
        """
        Returns the prefix to a node, i.e. everything before a ``{ }``
        range marker.

        * Not currently in use, may be removed at a later date.

        Returns
        -------
        str
            The node name with the surrounding range stripped.

        Fix: the original definition was missing the self parameter, so
        any call would have raised a TypeError.
        """

        return re.search(r"(.*)\{.*\}", self.name()).group(1).strip()


class ecfSuite(ecflow.Suite, ecfRoot):
    """
    Extends the ecfRoot and ecflow.Suite classes so that defining a suite
    can also generate the suite folder under the ecfhome directory.
    """

    def generate_folders(self, ecfhome):
        """
        Create the suite folder under ecfhome if it does not already
        exist.

        Parameters
        ----------
        ecfhome : str
            Path to the root level directory for the ecfhome.
        """

        folder_path = f"{ecfhome}/{self.name()}"
        # exist_ok avoids a race between the existence check and the
        # directory creation.
        os.makedirs(folder_path, exist_ok=True)


class ecfFamily(ecflow.Family, ecfRoot):
    """
    Extends the ecflow.Family and ecfRoot classes to provide the folder
    generation structure for families at the ecfhome location.
    """

    def generate_folders(self, ecfhome, suite, parents):
        """
        Create the family folder (including any parent family tiers) under
        the suite directory if it does not already exist.

        Parameters
        ----------
        ecfhome : str
            The root level directory as a string.
        suite : str
            The suite name to be appended to the ecfhome.
        parents : str
            '>'-delimited chain of parent families, or an empty value when
            the family sits directly under the suite.
        """

        if parents:
            folder_path = f"{ecfhome}/{suite}/{parents.replace('>','/')}/{self.name()}"
        else:
            folder_path = f"{ecfhome}/{suite}/{self.name()}"
        os.makedirs(folder_path, exist_ok=True)


class ecfTask(ecflow.Task, ecfRoot):
    """
    Extends the ecflow.Task and ecfRoot classes so the task's .ecf script
    can be located in the script repository (directly or via a template)
    and copied into the suite folder tree.
    """

    def setup_script(self, repopath, template):
        """
        Record where to look for the task script and which template, if
        any, to use instead of the task name.

        Parameters
        ----------
        repopath : str
            Path to the script repository used to populate the
            destination.
        template : str or None
            Template script name to use instead of the task name, or the
            sentinel 'skip' to suppress script generation.
        """

        self.scriptrepo = repopath
        self.template = template

    def generate_ecflow_task(self, ecfhome, suite, parents):
        """
        Locate the task's .ecf script (or its template) in the script
        repository and copy it to the destination built from the
        parameters. Exits the build when no script can be found.

        Parameters
        ----------
        ecfhome : str
            Path to the root level directory to place the scripts.
        suite : str
            Suite name to add the scripts to, appended to the ecfhome.
        parents : str
            Any parent folders appended to the ecfhome and suite folders.
        """

        if self.template == "skip":
            return
        script_name = f"{self.name()}.ecf"
        ecfscript = None
        search_script = f"{self.template}.ecf" if self.template is not \
            None else script_name
        if parents:
            script_path = f"{ecfhome}/{suite}/{parents.replace('>','/')}/{script_name}"
        else:
            script_path = f"{ecfhome}/{suite}/{script_name}"
        # Walk the repo; keep the first match and warn about duplicates.
        for root, dirs, files in os.walk(self.scriptrepo):
            if search_script in files and ecfscript is None:
                ecfscript = os.path.join(root, search_script)
            elif script_name in files:
                print(f"More than one script named {script_name}. "
                      "Using the first one found.")
        try:
            if ecfscript is not None:
                shutil.copyfile(ecfscript, script_path, follow_symlinks=True)
            else:
                raise ConfigurationError
        except ConfigurationError:
            print(f"Could not find the script {search_script}. Exiting build")
            sys.exit(1)


# Python user-defined exceptions used by the workflow generator.
class Error(Exception):
    """Base class for other exceptions"""
    pass


class RangeError(Error):
    """Raised when the range in the configuration file is incorrect"""
    pass


class ConfigurationError(Error):
    """Raised when there is an error in the configuration file."""
    pass
+ + Attributes + ---------- + suite_array : dict + A dictionary that contains ecflow.Suite objects provided by the ecflow + module. + DEFS : ecflow.Defs + A definition object provided by the ecflow module that holds all of the + suites. + + Methods + ------- + generate_workflow() + This is the main method, used to setup the suites from the YAML file and + then call each of the supporting methods, like edits, tasks, etc. to + populate the suites with each of the items. + + raiseexception(e) + This is just a simple method that is called if an exception is raised to + print out the error message and then call a sys.exit(1) so the app stops + + save() + This saves the suite definition file to the save dir with the file name + ecflow_suite.def. + + print() + Prints out what would be populated to the suite definition file. + + add_environment_edits(suite) + The suite is passed in and the edits from the environment are added. The + environment edits are defined in the init method. + + check_dict(node, key, key_is_dict=True) + This function checks for the presence of they key inside of the node. + Used to identify it various addons need to be added into the suite. + + add_suite_edits(suite, suite_dict) + Method used to parse through the YAML file and identify any edits that + apply to the suite itself and parse them so they can be added. + + process_definition_header() + If there is an externs section in the YAML file for a suite, this adds + the externs to the header. + + add_families(suite, nodes, parents=None) + Parses through the YAML file contents and adds the nodes that are + identified as families to either the parent suite or the parent family. + + add_tasks_and_edits(suite,nodes,parents=None) + After the families are added to the suite, the individual tasks, edits, + repeats, defstatus, and room for other task addons are appended. + + add_triggers_and_events(suite, nodes) + After the families and tasks are added, then the triggers and events + are processed. 
This needs to come after the families and tasks and + independently because of the interdependencies that exist. For example + a trigger for a task cannot be added until the task exists, otherwise + and error will be thrown. + """ + + def __init__(self, args, env_configs): + """ + Parameters + ---------- + args : dict + The arguments passed in by the command line. + env_configs : dict + The environment variables pulled in from the experiement directory. + + Returns + ------- + None + """ + + # Setup the base variables + self.args = args + self.env_configs = env_configs + self.suite_array = {} + self.DEFS = Defs() + + # Load in the ecflow configurations + base_ecflowconfig = load_ecflow_config(f'{args.ecflow_config}') + self.ecfconf = update_ecflow_config(base_ecflowconfig, env_configs) + + self.ecfhome = env_configs['base']['ECFgfs'] + + if 'scriptrepo' in self.ecfconf.keys(): + self.env_configs['base']['scriptrepo'] = self.ecfconf['scriptrepo'] + elif 'scriptrepo' not in self.env_configs['base'].keys(): + self.env_configs['base']['scriptrepo'] = f"{self.ecfhome}/scripts" + self.scriptrepo = self.env_configs['base']['scriptrepo'] + + # Setup the default edits from the environment + self.environment_edits = [ + 'ACCOUNT', + 'queue', + 'machine', + 'RUN_ENVIR', + ] + + def generate_workflow(self): + """ + This is the main method, used to setup the suites from the YAML file and + then call each of the supporting methods, like edits, tasks, etc. to + populate the suites with each of the items. + + Methods + ------- + get_suite_names(suitename) + In the event that the suite uses a list definition [X,Y,Z...], this + method will generate an array of the properly formatted names. + """ + + def get_suite_names(suitename): + """ + In the event that the suite uses a list definition [X,Y,Z...], this + method will generate an array of the properly formatted names. + + This is internal to the generate_workflow method and is only called + from within. 
The names are split out using regex if it is a list. + + Parameters + ---------- + suitename : str + A string representation of the + + Returns + ------- + array + If not a list, returns an array with the suitename paraemeter as + the only object. If it is a list, return all the names. + """ + + # Check to see if the name actually has a list, if not return an + # array with just the suite name as object in place 0. + if not re.search(r".*\[.*\].*", suitename): + return [f"{suitename}"] + + # If the name does have a list, break apart the prefix and suffix + # from the list and then run it through a for loop to get all + # possible values. + name_token = re.search("(.*)\[(.*)\](.*)", suitename) + base = name_token.group(1).strip() + list_items = name_token.group(2).strip().split(',') + suffix = name_token.group(3).strip() + name_array = [] + for item in list_items: + name_array.append(f"{base}{item}{suffix}") + return name_array + + # Add in extern headers + self.process_definition_header() + + # Process each of the suites + for suite in self.ecfconf['suites'].keys(): + if suite not in {'externs', 'edits'}: + for suite_name in get_suite_names(suite): + if suite_name not in self.suite_array.keys(): + new_suite = Ecflowsuite(suite_name, self.env_configs['base']['ECFgfs']) + else: + new_suite = self.suite_array[suite_name] + if new_suite.get_suite_name() not in self.suite_array.keys(): + self.add_environment_edits(new_suite) + self.add_suite_edits(new_suite, self.ecfconf['suites'][suite]) + if self.check_dict(self.ecfconf['suites'][suite], 'nodes'): + self.add_families(new_suite, self.ecfconf['suites'][suite]['nodes']) + self.add_tasks_and_edits(new_suite, self.ecfconf['suites'][suite]['nodes']) + self.suite_array[new_suite.get_suite_name()] = new_suite + + for suite in self.ecfconf['suites'].keys(): + if suite not in {'externs', 'edits'}: + for suite_name in get_suite_names(suite): + if self.check_dict(self.ecfconf['suites'][suite], 'nodes'): + 
self.add_triggers_and_events(self.suite_array[suite_name], + self.ecfconf['suites'][suite]['nodes']) + + # Add each suite to the definition object that will be used for the save + # or print. + for suite_name, suite in self.suite_array.items(): + self.DEFS += suite.get_suite() + + def raiseexception(self, e): + """ + This is just a simple method that is called if an exception is raised to + print out the error message and then call a sys.exit(1) so the app stops + + Calling this method will cause the application to exit with a status + code of 1. + + Parameters + ---------- + e : str + The error in string format to print out. + + Returns + ------- + None + """ + + print(e) + sys.exit(1) + + def save(self): + """ + This saves the suite definition file to the save dir with the file name + ecflow_suite.def. + + Parameters + ---------- + None + + Returns + ------- + None + """ + + print("Saving definition File") + savedir = self.args.savedir + defs_file = f"{savedir}/ecflow_suite.def" + self.DEFS.save_as_defs(defs_file) + + def print(self): + """ + Prints out what would be populated to the suite definition file. + + Parameters + ---------- + None + + Returns + ------- + None + """ + print(self.DEFS.check()) + print(self.DEFS) + + def add_environment_edits(self, suite): + """ + The suite is passed in and the edits from the environment are added. The + environment edits are defined in the init method. + + This method assumes that there are environment edits that have been set + by the experiement setup. + + Parameters + ---------- + suite : str + The name of the suite that will be used to add the environment edits + + Returns + ------- + None + """ + + # Add in the ECF Home and ECF Include edits. + suite.add_edit({'ECF_HOME': self.ecfhome, 'ECF_INCLUDE': self.ecfhome}) + + # Add in the edits for the environment. 
+ for edit in self.environment_edits: + edit = edit.upper() + if (edit in self.env_configs['base'].keys() and + self.env_configs['base'][edit] is not None): + edit_dict = {edit: self.env_configs['base'][edit]} + elif (edit.lower() in self.env_configs['base'].keys() and + self.env_configs['base'][edit.lower()] is not None): + edit_dict = {edit: self.env_configs['base'][edit.lower()]} + suite.add_edit(edit_dict) + + def check_dict(self, node, key, key_is_dict=True): + """ + This function checks for the presence of they key inside of the node. + Used to identify it various addons need to be added into the suite. + + If the node is a dict, it checks for the presence of the key but it also + needs to know if the key it is looking for is a dictionary or not. + + Parameters + ---------- + node : dict or str + The dictionary or string object to search for the presence of the + key + key : str + The search string to look for in the node objects + key_is_dict : bool + Checks if the key is a dictionary or if it should be searching for + a string. + + Returns + ------- + bool + True if the key is present, false otherwise. + """ + + if isinstance(node, dict) and f'{key}' in node.keys(): + if key_is_dict and isinstance(node[f'{key}'], dict): + return True + elif not key_is_dict: + return True + else: + return False + + def add_suite_edits(self, suite, suite_dict): + """ + Method used to parse through the YAML file and identify any edits that + apply to the suite itself and parse them so they can be added. + + Parameters + ---------- + suite : string + Name of the suite that needs the edits added. + suite_dict : + The dictionary for the suite that was passed in. 
+ + Returns + ------- + None + """ + + # Baseline edits + if 'edits' in self.ecfconf['suites'].keys(): + suite.add_edit(self.ecfconf['suites']['edits']) + + # Setup sutite specific edits + if type(suite_dict) is dict and 'edits' in suite_dict.keys(): + suite.add_edit(suite_dict['edits']) + + def process_definition_header(self): + """ + If there is an externs section in the YAML file for a suite, this adds + the externs to the header. + + Parameters + ---------- + None + + Returns + ------- + None + """ + + if 'externs' in self.ecfconf.keys(): + for extern in self.ecfconf['externs']: + self.DEFS.add_extern(extern) + + def add_families(self, suite, nodes, parents=None): + """ + Parses through the YAML file contents and adds the nodes that are + identified as families to either the parent suite or the parent family. + + This function is recursive to build the family architecture. + + While adding families, this method also adds in the edits, repeats, + defstatus, and time parameters to the families. + + Parameters + ---------- + suite : str + The suite that the families are to be added to + nodes : dict + The nodes within the suite, can be families or tasks but only the + families are processed in this method. + parents : str + If this family is not a top level one for the suite, this string is + the list of families that came before it, used to populate the + dictionary object in the ecflow_definitions module. 
+ + Returns + ------- + None + """ + + for item in nodes.keys(): + if isinstance(nodes[item], dict) and item not in {'edits', 'tasks'}: + suite.add_family(item, parents) + if parents: + family_path = f"{parents}>{item}" + else: + family_path = item + if self.check_dict(nodes[item], 'edits'): + suite.add_edit(nodes[item]['edits'], family_path) + if self.check_dict(nodes[item], 'repeat', False): + suite.add_repeat(nodes[item]['repeat'], family_path) + if self.check_dict(nodes[item], 'defstatus', False): + suite.add_defstatus(nodes[item]['defstatus'], family_path) + self.add_families(suite, nodes[item], family_path) + + def add_tasks_and_edits(self, suite, nodes, parents=None): + """ + After the families are added to the suite, the individual tasks, edits, + repeats, defstatus, and room for other task addons are appended. + + This is a recursive function that parses through the whole dictionary + of tasks and families to identify any tasks and add them to a family. + + This also adds in the defstatus, ediuts, repeats, times, etc. for the + tasks. + + Parameters + ---------- + suite : str + The suite the tasks need to be added to. + nodes : dict + Contains all the tasks and families for the parent node. 
+ parents : str + The parent family for any of the tasks + + Returns + ------- + None + """ + + for item in nodes.keys(): + if isinstance(nodes[item], dict) and item == 'tasks': + for task in nodes['tasks'].keys(): + if self.check_dict(nodes['tasks'][task], 'template', False): + task_template = nodes['tasks'][task]['template'] + else: + task_template = None + updated_task = find_env_param(task, 'env.', + self.env_configs) + suite.add_task(updated_task, parents, + self.scriptrepo, task_template) + if self.check_dict(nodes['tasks'][task], + 'edits'): + suite.add_task_edits(updated_task, + nodes['tasks'][task]['edits']) + if self.check_dict(nodes['tasks'][task], + 'repeat', False): + suite.add_task_repeat(updated_task, + nodes['tasks'][task]['repeat']) + if self.check_dict(nodes['tasks'][task], + 'defstatus', False): + suite.add_task_defstatus(updated_task, + nodes['tasks'] + [task]['defstatus']) + elif (isinstance(nodes[item], dict) and + item != 'edits'): + if parents: + family_path = f"{parents}>{item}" + else: + family_path = item + self.add_tasks_and_edits(suite, nodes[item], family_path) + + def add_triggers_and_events(self, suite, nodes): + """ + After the families and tasks are added, then the triggers and events + are processed. This needs to come after the families and tasks and + independently because of the interdependencies that exist. For example + a trigger for a task cannot be added until the task exists, otherwise + and error will be thrown. + + This is a recursive function and will parse through each family/task + to identify the work. + + Parameters + ---------- + suite : str + The suite to key off for adding the triggers + nodes : dict + The families/tasks that need to be parsed. 
+ + Returns + ------- + None + """ + + for item in nodes.keys(): + if isinstance(nodes[item], dict) and item == 'tasks': + for task in nodes['tasks'].keys(): + updated_task = find_env_param(task, 'env.', + self.env_configs) + if self.check_dict(nodes['tasks'][task], 'events', False): + suite.add_task_events(updated_task, + nodes['tasks'][task]['events']) + if self.check_dict(nodes['tasks'][task], 'triggers', False): + suite.add_task_triggers(updated_task, + nodes['tasks'][task]['triggers'], + self.suite_array) + elif isinstance(nodes[item], dict): + self.add_triggers_and_events(suite, nodes[item]) + + +def load_ecflow_config(configfile): + """ + This is the function to safely load the configuration file for the ecflow + environment. This is the base YAML that is built specifically for this + application and then returns it. + + Parameters + ---------- + configfile : str + The path to the configuration file that is to be loaded as part of the + ecflow config. + + Returns + ------- + dict + The dictionary results of the YAML safe load from the configuration + file. + """ + + with open(configfile, 'r') as file: + base_config = yaml.safe_load(file) + return base_config + + +def find_env_param(node, value, envconfig): + """ + Since there are components of the configuration that might get passed in + that are supposed to be replaced by environment variables AFTER the + configuration file has been loaded, this function is called in some of the + Ecflowsetup functions to allow the replacement of those parameters as + needed. + + Parameters + ---------- + node : dict + A dictionary object of the items that need to be scanned for replacing + value : str + A string object that is the prefix to be scanned and then the value + identified after identifier string is replaced with an environment + variable. + envconfig : dict + The dictionary of existing environment variables that are read in from + the experiment setup. 
+ + Returns + ------- + new_node : dict + The updated dictionary object that will replace the node object that + was passed in when the function was called. + """ + + new_node = node + if value in node: + variable_lookup = re.search(f".*{value}([\dA-Za-z_]*)", node).group(1).strip() + if variable_lookup in os.environ: + if isinstance(os.environ[variable_lookup], datetime.datetime): + new_variable = os.environ[variable_lookup].strftime("%Y%m%d%H") + else: + new_variable = os.environ[variable_lookup] + else: + if isinstance(envconfig['base'][variable_lookup], + datetime.datetime): + new_variable = envconfig['base'][variable_lookup].strftime("%Y%m%d%H") + else: + new_variable = envconfig['base'][variable_lookup] + search_key = re.search(r"(.*)(env\.[\dA-Za-z_]*)(.*)", node) + new_node = f"{search_key.group(1)} {new_variable} {search_key.group(3)}" + return new_node + + +def update_ecflow_config(configfile, envconfig): + """ + After the YAML file that drives the application is loaded in, the configs + need to be updated with anything that has the env. prefix to it and replace + that value with the environment variable. + + Parameters + ---------- + configfile : dict + The dictionary of the YAML configuration file read in. + envconfig : dict + The dictionary of objects that were read in from the experiment setup + on the supercomputer. + + Returns + ------- + config : dict + The updated configuration with the environment variables replaced. + """ + + def runupdate(nested_dict, value): + """ + To scan through the entire nested dictionary the run update was an easy + local function to use to provide recursion given that the parent + function did not work properly when trying to use a recursive call. + + Parameters + ---------- + nested_dict : dict + The nested dictionary to scan and replace the values. + value : str + The string to search for the replacement, currently set to env. 
+ + Returns + ------- + nested_dict : dict + The updated dictionary with all of the values replaced as necessary. + """ + for k, v in nested_dict.items(): + if isinstance(v, str) and value in v: + lookup = v.split('.') + variable_lookup = re.findall("[\dA-Za-z_]*", lookup[1])[0] + if variable_lookup in os.environ: + if isinstance(os.environ[variable_lookup], datetime.datetime): + nested_dict[k] = os.environ[variable_lookup].strftime("%Y%m%d%H") + else: + nested_dict[k] = os.environ[variable_lookup] + + else: + if isinstance(envconfig['base'][variable_lookup], datetime.datetime): + envvalue = envconfig['base'][variable_lookup].strftime("%Y%m%d%H") + else: + envvalue = envconfig['base'][variable_lookup] + nested_dict[k] = envvalue + elif isinstance(v, collections.abc.Mapping): + nested_dict[k] = runupdate(v, value) + return nested_dict + + config = runupdate(configfile, 'env.') + return config diff --git a/workflow_generator/prod.yml b/workflow_generator/prod.yml new file mode 100644 index 0000000000..a59ee2196c --- /dev/null +++ b/workflow_generator/prod.yml @@ -0,0 +1,425 @@ +--- +suites: + prod00: + edits: + CYC: '00' + prod06: + edits: + CYC: '06' + nodes: + gfs: + atmos: + post: + tasks: + jgfs_atmos_post_manager: + triggers: + - task: jgfs_atmos_tropcy_qc_reloc + suite: prod00 + prod[00,06]: + edits: + ECF_TRIES: '1' + ENVIR: 'prod' + PROJ: 'GFS' + PROJENVIR: 'DEV' + QUEUESHARED: 'dev_shared' + QUEUESERV: 'dev_transfer' + MACHINE_SITE: 'development' + nodes: + gfs: + edits: + RUN: 'gfs' + NET: 'gfs' + tasks: + jgfs_forecast: + triggers: + - task: jgfs_atmos_analysis + event: release_fcst + - task: jgfs_wave_prep + atmos: + obsproc: + dump: + tasks: + jgfs_atmos_tropcy_qc_reloc: + events: + - 'jtwc_bull_email' + prep: + tasks: + jgfs_atmos_emcsfc_sfc_prep: + triggers: + - task: jobsproc_gfs_atmos_dump + event: release_sfcprep + analysis: + tasks: + jgfs_atmos_analysis: + triggers: + - task: jobsproc_gfs_atmos_prep + - task: jgfs_atmos_emcsfc_sfc_prep + events: + 
- "release_fcst" + jgfs_atmos_analysis_calc: + triggers: + - task: jgfs_atmos_analysis + post: + tasks: + jgfs_atmos_post_manager: + triggers: + - task: jgfs_atmos_analysis + events: + - "release_postanl" + - "release_post( 384 )" + jgfs_atmos_post_anl: + template: skip + triggers: + - task: jgfs_atmos_post_manager + event: release_postanl + edits: + FHRGRP: '000' + FHRLST: 'anl' + HR: 'anl' + FHR: 'anl' + jgfs_atmos_post_f( 384 ): + template: jgfs_atmos_post_master + triggers: + - task: jgfs_atmos_post_manager + event: release_post( ) + edits: + FHRGRP: '( 1, )' + FHRLST: 'f( )' + FHR: 'f( )' + HR: '( )' + post_processing: + tasks: + jgfs_atmos_wafs_gcip: + triggers: + - task: jgfs_atmos_post_f003 + grib_wafs: + tasks: + jgfs_atmos_wafs_f000: + template: jgfs_atmos_wafs_master + triggers: + - task: jgfs_atmos_post_f000 + - task: jgfs_atmos_post_f120 + - task: jgfs_atmos_wafs_grib2 + edits: + FCSTHR: '000' + jgfs_atmos_wafs_f( 6,20,6 ): + template: jgfs_atmos_wafs_master + triggers: + - task: jgfs_atmos_post_f( ) + - task: jgfs_atmos_wafs_f( 0,,6 ) + edits: + FCSTHR: ( ) + grib2_wafs: + tasks: + jgfs_atmos_wafs_grib2: + triggers: + - task: jgfs_atmos_post_f000 + jgfs_atmos_wafs_grib2_0p25: + triggers: + - task: jgfs_atmos_post_f036 + jgfs_atmos_wafs_blending: + triggers: + - task: jgfs_atmos_wafs_grib2 + jgfs_atmos_wafs_blending_0p25: + triggers: + - task: jgfs_atmos_wafs_grib2_0p25 + bufr_sounding: + tasks: + jgfs_atmos_postsnd: + triggers: + - task: jgfs_atmos_post_manager + event: release_post000 + bulletins: + tasks: + jgfs_atmos_fbwind: + triggers: + - task: jgfs_atmos_post_f( 6,3,6 ) + awips_20km_1p0: + tasks: + jgfs_atmos_awips_f( 0,80,3 ): + template: jgfs_atmos_awips_master + triggers: + - task: jgfs_atmos_post_f( ) + edits: + FHRGRP: '( )' + FHRLST: 'f( )' + FCSTHR: '( )' + TRDRUN: 'YES' + jgfs_atmos_awips_f( 3,27,6 ): + edits: + TRDRUN: 'NO' + awips_g2: + tasks: + jgfs_atmos_awips_g2_f( 0,80,6 ): + template: jgfs_atmos_awips_g2_master + triggers: + - 
task: jgfs_atmos_post_f( ) + edits: + FHRGRP: '( )' + FHRLST: 'f( )' + FCSTHR: '( )' + TRDRUN: 'YES' + gempak: + tasks: + jgfs_atmos_gempak: + triggers: + - task: jgfs_atmos_analysis + jgfs_atmos_gempak_meta: + triggers: + - task: jgfs_atmos_analysis + jgfs_atmos_gempak_ncdc_upapgif: + triggers: + - task: jgfs_atmos_gempak + - task: jgfs_atmos_gempak + jgfs_atmos_npoess_pgrb2_0p5deg: + triggers: + - task: jgfs_atmos_post_anl + state: activate + - task: jgfs_atmos_post_anl + jgfs_atmos_pgrb2_spec_gempak: + triggers: + - task: jgfs_atmos_npoess_pgrb2_0p5deg + verf: + tasks: + jgfs_atmos_vminmon: + triggers: + - task: jgfs_atmos_analysis + wave: + init: + tasks: + jgfs_wave_init: + triggers: + - task: jobsproc_gfs_atmos_prep + prep: + tasks: + jgfs_wave_prep: + triggers: + - task: jgfs_wave_init + post: + tasks: + jgfs_wave_postsbs: + triggers: + - task: jgfs_atmos_post_manager + event: release_post000 + jgfs_wave_postpnt: + triggers: + - task: jgfs_forecast + jgfs_wave_post_bndpnt: + triggers: + - task: jgfs_atmos_post_manager + event: release_post180 + jgfs_wave_post_bndpntbll: + triggers: + - task: jgfs_atmos_post_manager + event: release_post180 + jgfs_wave_prdgen_gridded: + triggers: + - task: jgfs_wave_postsbs + state: active + - task: jgfs_wave_postsbs + operand: False + jgfs_wave_prdgen_bulls: + triggers: + - task: jgfs_wave_postpnt + - task: jgfs_wave_postsbs + gempak: + tasks: + jgfs_wave_gempak: + triggers: + - task: jgfs_wave_postsbs + state: active + - task: jgfs_wave_postsbs + operand: False + gdas: + edits: + RUN: 'gdas' + tasks: + jgdas_forecast: + triggers: + - task: jgdas_atmos_analysis + event: release_fcst + - task: jgdas_wave_prep + - task: jgdas_atmos_gldas + atmos: + obsproc: + dump: + tasks: + jgdas_atmos_tropcy_qc_reloc: + prep: + tasks: + jgdas_atmos_emcsfc_sfc_prep: + triggers: + - task: jobsproc_gdas_atmos_dump + event: release_sfcprep + init: + tasks: + jgdas_atmos_gldas: + triggers: + - task: jgdas_atmos_analysis + analysis: + tasks: + 
jgdas_atmos_analysis: + triggers: + - task: jobsproc_gdas_atmos_prep + - task: jgdas_atmos_emcsfc_sfc_prep + events: + - "release_fcst" + jgdas_atmos_analysis_calc: + triggers: + - task: jgdas_atmos_analysis + jgdas_atmos_analysis_diag: + triggers: + - task: jgdas_atmos_analysis + post: + tasks: + jgdas_atmos_post_manager: + triggers: + - task: jgdas_forecast + state: active + events: + - "release_postanl" + - "release_post( 10 )" + jgdas_atmos_post_anl: + template: jgdas_atmos_post_master + triggers: + - task: jgdas_atmos_post_manager + event: release_postanl + - task: jgdas_atmos_analysis_calc + edits: + FHRGRP: '000' + FHRLST: 'anl' + HR: 'anl' + FHR: 'anl' + jgdas_atmos_post_f( 10 ): + template: jgdas_atmos_post_master + triggers: + - task: jgdas_atmos_post_manager + event: release_post( ) + edits: + FHR: 'f( )' + HR: '( )' + FHRGRP: '( 1, )' + FHRLST: 'f( )' + post_processing: + tasks: + jgdas_atmos_chgres_forenkf: + triggers: + - task: jgdas_forecast + #- task: forecast + gempak: + tasks: + jgdas_atmos_gempak: + triggers: + - task: jgdas_forecast + jgdas_atmos_gempak_meta_ncdc: + triggers: + - task: jgdas_atmos_gempak + verf: + tasks: + jgdas_atmos_vminmon: + triggers: + - task: jgdas_atmos_analysis + jgdas_atmos_verfrad: + triggers: + - task: jgdas_atmos_analysis_diag + jgdas_atmos_verfozn: + triggers: + - task: jgdas_atmos_analysis_diag + wave: + init: + tasks: + jgdas_wave_init: + triggers: + - task: jobsproc_gdas_atmos_prep + prep: + tasks: + jgdas_wave_prep: + triggers: + - task: jgdas_wave_init + post: + tasks: + jgdas_wave_postsbs: + triggers: + - task: jgdas_atmos_post_manager + event: release_post000 + jgdas_wave_postpnt: + triggers: + - task: jgdas_forecast + enkfgdas: + edits: + RUN: 'gdas' + analysis: + create: + tasks: + jenkfgdas_select_obs: + triggers: + - task: jobsproc_gdas_atmos_prep + jenkfgdas_diag: + triggers: + - task: jenkfgdas_select_obs + jenkfgdas_update: + triggers: + - task: jenkfgdas_diag + recenter: + ecen: + grp1: + tasks: + 
jenkfgdas_ecen: + edits: + FHRGRP: '003' + grp2: + tasks: + jenkfgdas_ecen: + edits: + FHRGRP: '006' + grp3: + tasks: + jenkfgdas_ecen: + edits: + FHRGRP: '009' + tasks: + jenkfgdas_sfc: + triggers: + - task: jgdas_atmos_analysis_calc + - task: jenkfgdas_update + #forecast: + #grp( 1,40 ): + # edits: + # ENSGRP: '( )' + # tasks: + # jenkfgdas_fcst: + post: + tasks: + jenkfgdas_post_f( 3,6 ): + template: jenkfgdas_post_master + edits: + FHMIN_EPOS: ( ) + FHMAX_EPOS: ( ) + FHOUT_EPOS: ( ) + obsproc: + defstatus: complete + v1.0: + gfs: + atmos: + dump: + tasks: + jobsproc_gfs_atmos_dump: + template: skip + events: + - "release_sfcprep" + prep: + tasks: + jobsproc_gfs_atmos_prep: + template: skip + gdas: + atmos: + dump: + tasks: + jobsproc_gdas_atmos_dump: + template: skip + events: + - "release_sfcprep" + prep: + tasks: + jobsproc_gdas_atmos_prep: + template: skip diff --git a/workflow_generator/setup_workflow.py b/workflow_generator/setup_workflow.py new file mode 100644 index 0000000000..7166397534 --- /dev/null +++ b/workflow_generator/setup_workflow.py @@ -0,0 +1,114 @@ +#!/usr/bin/env python3 + +""" + PROGRAM: + Create a workflow file for use by a supercomputer. + AUTHOR: + Kyle Nevins + kyle.nevins@noaa.gov + FILE DEPENDENCIES: + 1. The configuration file that defines what jobs to run. It should be a + YAML file following the syntax defined in the README. + 2. config files for the experiment; e.g. config.base, config.fcst[.gfs] + etc. + Without this dependency, the script will fail + 3. The workflow utils package from the existing Rocoto generator. That + is used to read in the configuration files in the expdir. + 4. Any scripts defined in the YAML file must be present within the + script repository. + OUTPUT: + 1. Either an ecFlow definition file or a Rocoto XML file + 2. The folders and scripts needed to run either the ecflow suite or + Rocoto suite. 
+""" + +import os +import sys +from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter +sys.path.append(os.path.join(os.path.dirname(__file__), "../ush/rocoto")) +import workflow_utils as wfu + + +def parse_command_line(): + """Parse the arguments from the command line + + This function pulls in the command line arguments and parses them using the + argparse module. + + Parameters + ---------- + None + + Returns + ------- + arguments : array + An array of the arguments that were passed in as well as any + that were defaulted. + """ + parser = ArgumentParser(description=""" Create the workflow files for + ecFlow by deploying scripts and definition + files or Rocoto""", + formatter_class=ArgumentDefaultsHelpFormatter) + parser.add_argument('--ecflow-config', type=str, + default='ecflow_build.yml', required=False, + help='ecFlow Generator configuration file') + parser.add_argument('--expdir', type=str, + required=False, default=os.environ['PWD'], + help="""This is to be the full path to experiment' + 'directory containing config files""") + parser.add_argument('--savedir', type=str, + default=os.environ['PWD'], required=False, + help='Location to save the definition files') + arguments = parser.parse_args() + + return arguments + + +def main(): + """Main function to start the workflow generator + + This is the main function that will read in the command line arguments + using the parse_command_line function and create an array for the + environment configurations to be used throughout the application. + + For the ecFlow setup, it sets up a new workflow and then uses the generic + functions which are available for the Rocoto setup as well of + generate_workflow and save. + + ** Important note: This function does also pull from the ush/rocoto + application to use the get_configs and config_parser functions to populate + the environment variable array. 
+ + Parameters + ---------- + None + + Returns + ------- + None + """ + args = parse_command_line() + + environment_configs = wfu.get_configs(args.expdir) + envconfigs = {} + envconfigs['base'] = wfu.config_parser([wfu.find_config('config.base', + environment_configs)]) + + # The default setup in the parse_command_line() function assumes that if + # the --ecflow-config file is set that it should be an ecflow setup. When + # Rocoto is implemented, the default for --ecflow-config should be removed + # and additional parameters added. + if args.ecflow_config is not None: + from ecflow_setup.ecflow_setup import Ecflowsetup + workflow = Ecflowsetup(args, envconfigs) + else: + import rocoto_setup + + workflow.generate_workflow() + workflow.save() + + +# Main Initializer +if __name__ == "__main__": + main() + sys.exit(0) From 6d682196c5b08dccbdfd06a6c3def54f106bce4b Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Thu, 12 May 2022 11:07:02 -0500 Subject: [PATCH 16/33] Remove p-dump if-block in config.base.emc.dyn Refs: #665 --- parm/config/config.base.emc.dyn | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/parm/config/config.base.emc.dyn b/parm/config/config.base.emc.dyn index 5eb351a64a..da7ef3c6b3 100755 --- a/parm/config/config.base.emc.dyn +++ b/parm/config/config.base.emc.dyn @@ -99,10 +99,7 @@ export PSLOT="@PSLOT@" export EXPDIR="@EXPDIR@/$PSLOT" export ROTDIR="@ROTDIR@/$PSLOT" export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work -export DUMP_SUFFIX="" -#if [[ "$CDATE" -ge "2019092100" && "$CDATE" -le "2019110700" ]]; then -# export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel -#fi +export DUMP_SUFFIX="" #Note: Can be set to "p" when using p-dumps export RUNDIR="$STMP/RUNDIRS/$PSLOT" export DATAROOT="$RUNDIR/$CDATE/$CDUMP" export ARCDIR="$NOSCRUB/archive/$PSLOT" From bf43dcbbf990753858549b55c67b33af69737bba Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Thu, 12 May 2022 11:16:13 -0500 Subject: [PATCH 
17/33] Add back in WCOSS_DELL_P3 references in resource configs Will remove WCOSS_DELL_P3 references after official retirement Refs: #665 --- parm/config/config.fv3.emc.dyn | 2 ++ parm/config/config.resources.emc.dyn | 23 +++++++++++++++++++++++ 2 files changed, 25 insertions(+) diff --git a/parm/config/config.fv3.emc.dyn b/parm/config/config.fv3.emc.dyn index ea9fca9ab7..72c5a85632 100755 --- a/parm/config/config.fv3.emc.dyn +++ b/parm/config/config.fv3.emc.dyn @@ -22,6 +22,8 @@ echo "BEGIN: config.fv3" if [[ "$machine" = "WCOSS2" ]]; then export npe_node_max=128 +elif [[ "$machine" = "WCOSS_DELL_P3" ]]; then + export npe_node_max=28 elif [[ "$machine" = "JET" ]]; then export npe_node_max=24 elif [[ "$machine" = "HERA" ]]; then diff --git a/parm/config/config.resources.emc.dyn b/parm/config/config.resources.emc.dyn index 7a36b7e2e1..7de606ecd2 100755 --- a/parm/config/config.resources.emc.dyn +++ b/parm/config/config.resources.emc.dyn @@ -24,6 +24,11 @@ echo "BEGIN: config.resources" if [[ "$machine" = "WCOSS2" ]]; then export npe_node_max=128 +elif [[ "$machine" = "WCOSS_DELL_P3" ]]; then + export npe_node_max=28 + if [ "$QUEUE" = "dev2" -o "$QUEUE" = "devonprod2" -o "$QUEUE" = "devmax2" ]; then # WCOSS Dell 3.5 + export npe_node_max=40 + fi elif [[ "$machine" = "JET" ]]; then export npe_node_max=24 elif [[ "$machine" = "HERA" ]]; then @@ -139,6 +144,7 @@ elif [ $step = "anal" ]; then export npe_anal=84 export npe_anal_gfs=84 fi + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_anal=7; fi export npe_node_anal=$(echo "$npe_node_max / $nth_anal" | bc) if [[ "$machine" = "WCOSS2" ]]; then export npe_node_anal=15; fi export nth_cycle=$npe_node_max @@ -151,6 +157,7 @@ elif [ $step = "analcalc" ]; then export ntasks=$npe_analcalc export nth_analcalc=1 export npe_node_analcalc=$npe_node_max + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export npe_analcalc=127 ; fi elif [ $step = "analdiag" ]; then @@ -196,6 +203,7 @@ elif [ $step = "post" ]; then export 
npe_node_post=$npe_post export npe_node_post_gfs=$npe_post export npe_node_dwn=$npe_node_max + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export npe_node_post=28 ; fi if [[ "$npe_node_post" -gt "$npe_node_max" ]]; then export npe_node_post=$npe_node_max ; fi if [[ "$npe_node_post_gfs" -gt "$npe_node_max" ]]; then export npe_node_post_gfs=$npe_node_max ; fi @@ -304,6 +312,7 @@ elif [ $step = "eobs" -o $step = "eomg" ]; then export npe_eobs=20 fi export nth_eobs=2 + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_eobs=7; fi export npe_node_eobs=$(echo "$npe_node_max / $nth_eobs" | bc) if [[ "$machine" = "WCOSS2" && "$CASE" = "C768" ]]; then @@ -326,6 +335,10 @@ elif [ $step = "eupd" ]; then if [ $CASE = "C768" ]; then export npe_eupd=480 export nth_eupd=6 + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then + export npe_eupd=960 + export nth_eupd=7 + fi if [[ "$machine" = "HERA" ]]; then export npe_eupd=150 export nth_eupd=40 @@ -333,6 +346,9 @@ elif [ $step = "eupd" ]; then elif [ $CASE = "C384" ]; then export npe_eupd=270 export nth_eupd=2 + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then + export nth_eupd=9 + fi if [[ "$machine" = "HERA" ]]; then export npe_eupd=100 export nth_eupd=40 @@ -352,6 +368,7 @@ elif [ $step = "ecen" ]; then export wtime_ecen="00:10:00" export npe_ecen=80 export nth_ecen=4 + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_ecen=7; fi if [ $CASE = "C384" -o $CASE = "C192" -o $CASE = "C96" -o $CASE = "C48" ]; then export nth_ecen=2; fi export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) export nth_cycle=$nth_ecen @@ -380,6 +397,7 @@ elif [ $step = "epos" ]; then export wtime_epos="00:15:00" export npe_epos=80 export nth_epos=4 + if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_epos=7; fi export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) elif [ $step = "postsnd" ]; then @@ -403,6 +421,11 @@ elif [ $step = "awips" ]; then export npe_node_awips=1 export nth_awips=1 export memory_awips="1GB" + if [[ "$machine" == 
"WCOSS_DELL_P3" ]]; then + export npe_awips=2 + export npe_node_awips=2 + export nth_awips=1 + fi elif [ $step = "gempak" ]; then From 789de9def0ee4505d78e09a7b3fd3b617461a19d Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Thu, 12 May 2022 22:01:17 +0000 Subject: [PATCH 18/33] Update target version files for external components - Update obsproc and prepobs version to rd ones - Add tracker_ver and fit_ver Refs: #665 --- versions/hera.ver | 7 +++++-- versions/orion.ver | 7 +++++-- versions/wcoss2.ver | 7 +++++-- 3 files changed, 15 insertions(+), 6 deletions(-) diff --git a/versions/hera.ver b/versions/hera.ver index 043f82a748..8e8ec5d719 100644 --- a/versions/hera.ver +++ b/versions/hera.ver @@ -2,8 +2,8 @@ export hpc_ver=1.2.0 export hpc_intel_ver=18.0.5.274 export hpc_impi_ver=2018.0.4 -export obsproc_run_ver=1.0.0 -export prepobs_run_ver=1.0.0 +export obsproc_run_ver=1.0.0-rd +export prepobs_run_ver=1.0.0-rd export hpss_ver=hpss export prod_util_ver=1.2.2 @@ -14,3 +14,6 @@ export esmf_ver=8_0_1 export nco_ver=4.9.3 export gempak_ver=7.4.2 export wrf_io_ver=1.2.0 + +export tracker_ver=v1.1.15.5 +export fit_ver="newm.1.4" diff --git a/versions/orion.ver b/versions/orion.ver index 340d5f0308..e6be2107ac 100644 --- a/versions/orion.ver +++ b/versions/orion.ver @@ -2,8 +2,8 @@ export hpc_ver=1.2.0 export hpc_intel_ver=2018.4 export hpc_impi_ver=2018.4 -export obsproc_run_ver=1.0.0 -export prepobs_run_ver=1.0.0 +export obsproc_run_ver=1.0.0-rd +export prepobs_run_ver=1.0.0-rd export prod_util_ver=1.2.2 export cmake_ver=3.22.1 @@ -12,3 +12,6 @@ export python_ver=3.7.5 export wrf_io_ver=1.2.0 export esmf_ver=8_0_1 export nco_ver=4.9.3 + +export tracker_ver=v1.1.15.5 +export fit_ver="newm.1.4" diff --git a/versions/wcoss2.ver b/versions/wcoss2.ver index 40471c14d3..96048267f8 100644 --- a/versions/wcoss2.ver +++ b/versions/wcoss2.ver @@ -2,5 +2,8 @@ export envvar_ver=1.0 export prod_envir_ver=${prod_envir_ver:-2.0.4} # Allow override from ops ecflow export 
prod_util_ver=${prod_util_ver:-2.0.9} # Allow override from ops ecflow -export obsproc_run_ver=1.0.0 -export prepobs_run_ver=1.0.0 +export obsproc_run_ver=1.0.0-rd +export prepobs_run_ver=1.0.0-rd + +export tracker_ver=v1.1.15.5 +export fit_ver="newm.1.4" From 4aa408eb57b574839cdeb2e2dbfe84b76bc9f66c Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Thu, 12 May 2022 22:02:46 +0000 Subject: [PATCH 19/33] Change enkf forecast to run serially Based on changes made for WCOSS2, change the enkf forecast jobs to run serially everywhere for stable completions. Refs: #665 --- parm/config/config.efcs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/parm/config/config.efcs b/parm/config/config.efcs index 752d35f40a..2eb0bde36e 100755 --- a/parm/config/config.efcs +++ b/parm/config/config.efcs @@ -64,7 +64,7 @@ if [[ "$OUTPUT_FILE" == "netcdf" ]]; then export ichunk2d=0; export jchunk2d=0 export ichunk3d=0; export jchunk3d=0; export kchunk3d=0 RESTILE=`echo $CASE_ENKF |cut -c 2-` - export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf' " + export OUTPUT_FILETYPES=" 'netcdf' 'netcdf' " if [ $RESTILE -ge 384 ]; then export ichunk2d=$((4*RESTILE)) export jchunk2d=$((2*RESTILE)) From 631e4d094eeebc45748df2b207b5e6ab85785699 Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Thu, 12 May 2022 22:07:11 +0000 Subject: [PATCH 20/33] Update config.vrfy - Turn off VSDB - Consolidate tracker section while retaining WCOSS_DELL_P3 support - Consolidate Fit2Obs sections for new version - Remove WCOSS_C references Refs: #665 --- parm/config/config.vrfy | 32 +++++++++----------------------- 1 file changed, 9 insertions(+), 23 deletions(-) diff --git a/parm/config/config.vrfy b/parm/config/config.vrfy index f40a1e73b6..cf44f77821 100755 --- a/parm/config/config.vrfy +++ b/parm/config/config.vrfy @@ -16,10 +16,10 @@ export CDFNL="gdas" # Scores verification against GDAS/GFS analysi export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification export 
VRFYFITS="YES" # Fit to observations -export VSDB_STEP1="YES" # Populate VSDB database +export VSDB_STEP1="NO" # Populate VSDB database export VSDB_STEP2="NO" -export VRFYG2OBS="YES" # Grid to observations, see note below if turning ON -export VRFYPRCP="YES" # Precip threat scores +export VRFYG2OBS="NO" # Grid to observations, see note below if turning ON +export VRFYPRCP="NO" # Precip threat scores export VRFYRAD="YES" # Radiance data assimilation monitoring export VRFYOZN="YES" # Ozone data assimilation monitoring export VRFYMINMON="YES" # GSI minimization monitoring @@ -34,8 +34,7 @@ export RUNMOS="NO" # whether to run entire MOS package if [ $VRFYFITS = "YES" ]; then - export fit_ver="newm.1.3" - export fitdir="$BASE_GIT/verif/global/Fit2Obs/${fit_ver}/batrun" + export fitdir="$BASE_GIT/Fit2Obs/${fit_ver}/batrun" export PRVT=$HOMEgfs/fix/fix_gsi/prepobs_errtable.global export HYBLEVS=$HOMEgfs/fix/fix_am/global_hyblev.l${LEVS}.txt export CUE2RUN=$QUEUE @@ -47,9 +46,8 @@ if [ $VRFYFITS = "YES" ]; then export CONVNETC="YES" fi - if [ $machine = "WCOSS_C" ]; then - export fitdir="$BASE_GIT/verif/global/parafits.fv3nems/batrun" - export PREPQFITSH="$fitdir/subfits_cray_nems" + if [ $machine = "WCOSS2" ]; then + export PREPQFITSH="$fitdir/subfits_wcoss2" elif [ $machine = "WCOSS_DELL_P3" ]; then export PREPQFITSH="$fitdir/subfits_dell_nems" elif [ $machine = "HERA" ]; then @@ -142,17 +140,13 @@ fi # Cyclone genesis and cyclone track verification #------------------------------------------------- -export ens_tracker_ver=v1.1.15.1 if [ $machine = "WCOSS_DELL_P3" ] ; then export ens_tracker_ver=v1.1.15.3 + export HOMEens_tracker=$BASE_GIT/tracker/ens_tracker.${ens_tracker_ver} fi -export HOMEens_tracker=$BASE_GIT/tracker/ens_tracker.${ens_tracker_ver} -if [ $machine = "ORION" ] ; then - export HOMEens_tracker=$BASE_GIT/tracker/TC_tracker.v1.1.15.2 -fi +export HOMEens_tracker=$BASE_GIT/TC_tracker/${tracker_ver} if [ "$VRFYTRAK" = "YES" ]; then - export 
TRACKERSH="$HOMEgfs/jobs/JGFS_ATMOS_CYCLONE_TRACKER" if [ "$CDUMP" = "gdas" ]; then export FHOUT_CYCLONE=3 @@ -163,22 +157,16 @@ if [ "$VRFYTRAK" = "YES" ]; then fi fi - if [[ "$VRFYGENESIS" == "YES" && "$CDUMP" == "gfs" ]]; then - export GENESISSH="$HOMEgfs/jobs/JGFS_ATMOS_CYCLONE_GENESIS" fi if [[ "$VRFYFSU" == "YES" && "$CDUMP" == "gfs" ]]; then - export GENESISFSU="$HOMEgfs/jobs/JGFS_ATMOS_FSU_GENESIS" fi if [[ "$RUNMOS" == "YES" && "$CDUMP" == "gfs" ]]; then - - if [ $machine = "WCOSS_C" ] ; then - export RUNGFSMOSSH="$HOMEgfs/scripts/run_gfsmos_master.sh.cray" - elif [ $machine = "WCOSS_DELL_P3" ] ; then + if [ $machine = "WCOSS_DELL_P3" ] ; then export RUNGFSMOSSH="$HOMEgfs/scripts/run_gfsmos_master.sh.dell" elif [ $machine = "HERA" ] ; then export RUNGFSMOSSH="$HOMEgfs/scripts/run_gfsmos_master.sh.hera" @@ -189,6 +177,4 @@ if [[ "$RUNMOS" == "YES" && "$CDUMP" == "gfs" ]]; then fi fi - - echo "END: config.vrfy" From d98c14d43ad5375fe29526c621cd92e26de21019 Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Fri, 13 May 2022 14:24:05 +0000 Subject: [PATCH 21/33] Update ecf PBS excl to exclhost - Update on Dogwood implemented cgroups, which means memory limits are now enforced. - Exclusive jobs must now use "place=exclhost" insted of "place=excl". - Associated exclusive ecf script PBS statements are updated to exclhost. 
Refs: #399 --- ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf | 2 +- ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf | 2 +- ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf | 2 +- ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf | 2 +- ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf | 2 +- ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf | 2 +- ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf | 2 +- ecf/scripts/gdas/atmos/init/jgdas_atmos_gldas.ecf | 2 +- ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf | 2 +- ecf/scripts/gdas/jgdas_forecast.ecf | 2 +- ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf | 2 +- ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf | 2 +- ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf | 2 +- .../atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf | 2 +- ecf/scripts/gfs/jgfs_forecast.ecf | 2 +- ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf | 2 +- ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf | 2 +- ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf | 2 +- 18 files changed, 18 insertions(+), 18 deletions(-) diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf index 0f14ee74ab..d2f1d4b60f 100755 --- a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf +++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:10:00 #PBS -l select=12:mpiprocs=40:ompthreads=3:ncpus=120 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:exclhost #PBS -l debug=true model=gfs diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf index a15e18126f..a32f56115e 100755 --- a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf +++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf @@ -5,7 +5,7 @@ #PBS 
-A %PROJ%-%PROJENVIR% #PBS -l walltime=00:30:00 #PBS -l select=35:mpiprocs=9:ompthreads=14:ncpus=126 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:exclhost #PBS -l debug=true model=gfs diff --git a/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf b/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf index 2a72897cfa..163cda8c1d 100755 --- a/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf +++ b/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:10:00 #PBS -l select=3:mpiprocs=32:ompthreads=4:ncpus=128 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:exclhost #PBS -l debug=true model=gfs diff --git a/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf b/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf index 2584b572aa..e5ca6aadfe 100755 --- a/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf +++ b/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:40:00 #PBS -l select=4:mpiprocs=128:ompthreads=1:ncpus=128 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:exclhost #PBS -l debug=true model=gfs diff --git a/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf b/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf index 744cad198b..9215deabe0 100755 --- a/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf +++ b/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:15:00 #PBS -l select=3:mpiprocs=32:ompthreads=4:ncpus=128 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:exclhost #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf index f4e13909a6..b6dbb2f127 100755 --- a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf +++ b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf @@ -5,7 +5,7 @@ #PBS -A 
%PROJ%-%PROJENVIR% #PBS -l walltime=00:50:00 #PBS -l select=52:mpiprocs=15:ompthreads=8:ncpus=120 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:exclhost #PBS -l debug=true export model=gfs diff --git a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf index 7f884e586f..a2e1124ad3 100755 --- a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf +++ b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:10:00 #PBS -l select=1:mpiprocs=128:ompthreads=1:ncpus=128 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:exclhost #PBS -l hyper=true #PBS -l debug=true diff --git a/ecf/scripts/gdas/atmos/init/jgdas_atmos_gldas.ecf b/ecf/scripts/gdas/atmos/init/jgdas_atmos_gldas.ecf index 8e7cf213f8..782ecaea92 100755 --- a/ecf/scripts/gdas/atmos/init/jgdas_atmos_gldas.ecf +++ b/ecf/scripts/gdas/atmos/init/jgdas_atmos_gldas.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:20:00 #PBS -l select=1:mpiprocs=112:ompthreads=1:ncpus=112 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:exclhost #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf index e6fc754bf0..3fc5d5bb7d 100755 --- a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf +++ b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:12:00 #PBS -l select=1:mpiprocs=112:ompthreads=1:ncpus=112 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:exclhost #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gdas/jgdas_forecast.ecf b/ecf/scripts/gdas/jgdas_forecast.ecf index 2484d38d08..d894064f22 100755 --- a/ecf/scripts/gdas/jgdas_forecast.ecf +++ b/ecf/scripts/gdas/jgdas_forecast.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=01:00:00 #PBS -l 
select=27:mpiprocs=32:ompthreads=3:ncpus=96 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:exclhost #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf index 6052cd9a1e..4496045141 100755 --- a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf +++ b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:40:00 #PBS -l select=55:mpiprocs=15:ompthreads=8:ncpus=120 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:exclhost #PBS -l debug=true export model=gfs diff --git a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf index 1fe1b33005..fa1e87a0eb 100755 --- a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf +++ b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:10:00 #PBS -l select=1:mpiprocs=128:ompthreads=1:ncpus=128 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:exclhost #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf index 89691034b0..fd69e3a786 100755 --- a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf +++ b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:20:00 #PBS -l select=1:mpiprocs=126:ompthreads=1:ncpus=126 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:exclhost #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf b/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf index decd87f480..4acbbe2b12 100755 --- a/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf +++ 
b/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=02:00:00 #PBS -l select=2:mpiprocs=20:ompthreads=1:ncpus=20 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:exclhost #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/jgfs_forecast.ecf b/ecf/scripts/gfs/jgfs_forecast.ecf index 9d2b98de70..acb197f581 100755 --- a/ecf/scripts/gfs/jgfs_forecast.ecf +++ b/ecf/scripts/gfs/jgfs_forecast.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=02:30:00 #PBS -l select=112:mpiprocs=24:ompthreads=5:ncpus=120 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:exclhost #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf index 01fd5c9575..8f59963c97 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=01:00:00 #PBS -l select=3:ncpus=80:ompthreads=1 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:exclhost #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf index 0f3c7f5fd3..8464c73fa5 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=01:00:00 #PBS -l select=4:ncpus=112:ompthreads=1 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:exclhost #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf index a0bc3a7049..bcf7f4c8c3 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=01:30:00 #PBS -l 
select=4:ncpus=50:ompthreads=1 -#PBS -l place=vscatter:excl +#PBS -l place=vscatter:exclhost #PBS -l debug=true model=gfs From 25e5c901b4500565d0f2864092e71222fbecb80f Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Fri, 13 May 2022 14:26:46 +0000 Subject: [PATCH 22/33] Update "excl" to "exclhost" in workflow_utils.py Need to set exclhost for exclusive jobs on WCOSS2 now after cgroups was implemented. Matches updates to exclusive job ecf script PBS statements. Refs: #399 --- ush/rocoto/workflow_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/rocoto/workflow_utils.py b/ush/rocoto/workflow_utils.py index 55337abf99..f687f852ca 100755 --- a/ush/rocoto/workflow_utils.py +++ b/ush/rocoto/workflow_utils.py @@ -338,7 +338,7 @@ def get_resources(machine, cfg, task, reservation, cdump='gdas'): if machine in ['WCOSS2'] and task not in ['arch', 'earc', 'getic']: natstr = "-l place=vscatter" if memory is None: - natstr += ":excl" + natstr += ":exclhost" elif machine in ['WCOSS']: resstr = f'{tasks}' From 19fcfa634f672dda6c194c294042f521c1e5c63d Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Fri, 13 May 2022 14:30:12 +0000 Subject: [PATCH 23/33] Add HOMEobsproc back to config.base.nco.static Will consider removing at later date. 
Refs: #399 --- parm/config/config.base.nco.static | 1 + 1 file changed, 1 insertion(+) diff --git a/parm/config/config.base.nco.static b/parm/config/config.base.nco.static index 02da0bca83..825b20587a 100755 --- a/parm/config/config.base.nco.static +++ b/parm/config/config.base.nco.static @@ -64,6 +64,7 @@ export REALTIME="YES" export FIXgsi="$HOMEgfs/fix/fix_gsi" export HOMEfv3gfs="$HOMEgfs/sorc/fv3gfs.fd" export HOMEpost="$HOMEgfs" +export HOMEobsproc="/lfs/h1/ops/prod/packages/obsproc.v1.0.0" export BASE_VERIF="$BASE_GIT/verif/global/tags/vsdb" # CONVENIENT utility scripts and other environment parameters From f33a2ed9bc2196cc9d3b67ec74ec2da9eb1bdb4c Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Fri, 13 May 2022 15:01:13 +0000 Subject: [PATCH 24/33] EnKf forecast serial netcdf updates and DELTIM=200 - Update config.efcs to run EnKF forecast job with serial netcdf instead of parallel netcdf. Based on joint decision between NCO and EMC. - Update C384 config.fv3.nco.static block to set DELTIM=200 (NCO request). - Update C384 config.fv3.nco.static block to set WRITE_GROUP=2 to speed up serial EnKF forecast jobs to fit inside needed window in ops. 
Refs: #399 --- parm/config/config.efcs | 2 +- parm/config/config.fv3.nco.static | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/parm/config/config.efcs b/parm/config/config.efcs index 752d35f40a..2eb0bde36e 100755 --- a/parm/config/config.efcs +++ b/parm/config/config.efcs @@ -64,7 +64,7 @@ if [[ "$OUTPUT_FILE" == "netcdf" ]]; then export ichunk2d=0; export jchunk2d=0 export ichunk3d=0; export jchunk3d=0; export kchunk3d=0 RESTILE=`echo $CASE_ENKF |cut -c 2-` - export OUTPUT_FILETYPES=" 'netcdf_parallel' 'netcdf' " + export OUTPUT_FILETYPES=" 'netcdf' 'netcdf' " if [ $RESTILE -ge 384 ]; then export ichunk2d=$((4*RESTILE)) export jchunk2d=$((2*RESTILE)) diff --git a/parm/config/config.fv3.nco.static b/parm/config/config.fv3.nco.static index 831b376f90..a2aaaba0d5 100755 --- a/parm/config/config.fv3.nco.static +++ b/parm/config/config.fv3.nco.static @@ -75,7 +75,7 @@ case $case_in in export WRTIOBUF="8M" ;; "C384") - export DELTIM=240 + export DELTIM=200 export layout_x=8 export layout_y=8 export layout_x_gfs=6 @@ -85,7 +85,7 @@ case $case_in in export nth_fv3=1 export nth_fv3_gfs=1 export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - export WRITE_GROUP=1 + export WRITE_GROUP=2 export WRTTASK_PER_GROUP=64 export WRITE_GROUP_GFS=1 export WRTTASK_PER_GROUP_GFS=64 From 7d2d9c69ebdf4a38b9b259dc4300e2ae8668cffa Mon Sep 17 00:00:00 2001 From: "Kate.Friedman" Date: Mon, 16 May 2022 16:07:21 +0000 Subject: [PATCH 25/33] Add -g and -traceback flags to utility builds if missing - Reviewed the various global-workflow utility code builds and added "-g" and/or "-traceback" flags if missing. - Added to only WCOSS2 makefiles/modulefiles for now for ops. 
Refs: #399, #791 --- modulefiles/modulefile.fv3nc2nemsio.wcoss2.lua | 2 +- sorc/build_enkf_chgres_recenter.sh | 2 +- sorc/build_enkf_chgres_recenter_nc.sh | 2 +- sorc/fbwndgfs.fd/makefile.GENERIC | 2 +- sorc/gaussian_sfcanl.fd/makefile.sh | 2 +- sorc/supvit.fd/makefile | 2 +- util/sorc/mkgfsawps.fd/makefile.wcoss2 | 2 +- util/sorc/overgridid.fd/makefile | 2 +- util/sorc/rdbfmsua.fd/makefile.wcoss2 | 2 +- util/sorc/webtitle.fd/makefile | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/modulefiles/modulefile.fv3nc2nemsio.wcoss2.lua b/modulefiles/modulefile.fv3nc2nemsio.wcoss2.lua index 3b25fd7906..b72f0c13cd 100755 --- a/modulefiles/modulefile.fv3nc2nemsio.wcoss2.lua +++ b/modulefiles/modulefile.fv3nc2nemsio.wcoss2.lua @@ -15,4 +15,4 @@ load(pathJoin("w3nco", os.getenv("w3nco_ver"))) load(pathJoin("nemsio", os.getenv("nemsio_ver"))) setenv("FCMP","ftn") -setenv("FFLAGS","-free -O3") +setenv("FFLAGS","-free -O3 -g -traceback") diff --git a/sorc/build_enkf_chgres_recenter.sh b/sorc/build_enkf_chgres_recenter.sh index 6102007181..1b2718abeb 100755 --- a/sorc/build_enkf_chgres_recenter.sh +++ b/sorc/build_enkf_chgres_recenter.sh @@ -15,7 +15,7 @@ fi cd ${cwd}/enkf_chgres_recenter.fd -export FFLAGS="-O3 -r8 -i4 -qopenmp -traceback -fp-model precise" +export FFLAGS="-O3 -r8 -i4 -qopenmp -g -traceback -fp-model precise" make clean make diff --git a/sorc/build_enkf_chgres_recenter_nc.sh b/sorc/build_enkf_chgres_recenter_nc.sh index 314e378925..acddd6545b 100755 --- a/sorc/build_enkf_chgres_recenter_nc.sh +++ b/sorc/build_enkf_chgres_recenter_nc.sh @@ -15,7 +15,7 @@ fi cd ${cwd}/enkf_chgres_recenter_nc.fd -export FFLAGS="-O3 -qopenmp -traceback -fp-model precise" +export FFLAGS="-O3 -qopenmp -g -traceback -fp-model precise" export FV3GFS_NCIO_LIB="${cwd}/gsi.fd/build/lib/libfv3gfs_ncio.a" export FV3GFS_NCIO_INC="${cwd}/gsi.fd/build/include" diff --git a/sorc/fbwndgfs.fd/makefile.GENERIC b/sorc/fbwndgfs.fd/makefile.GENERIC index 30b2f8d823..7cedf6242a 
100755 --- a/sorc/fbwndgfs.fd/makefile.GENERIC +++ b/sorc/fbwndgfs.fd/makefile.GENERIC @@ -50,7 +50,7 @@ PROFLIB = -lprof # To compile with flowtracing turned on, use the second line # To compile giving profile additonal information, use the third line # WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = -O3 -g -I ${IP_INC8} -assume byterecl -convert big_endian -r8 -i8 +FFLAGS = -O3 -g -traceback -I ${IP_INC8} -assume byterecl -convert big_endian -r8 -i8 #FFLAGS = -F #FFLAGS = -Wf"-ez" diff --git a/sorc/gaussian_sfcanl.fd/makefile.sh b/sorc/gaussian_sfcanl.fd/makefile.sh index 63730beea6..d81a81e7da 100755 --- a/sorc/gaussian_sfcanl.fd/makefile.sh +++ b/sorc/gaussian_sfcanl.fd/makefile.sh @@ -1,6 +1,6 @@ #!/bin/sh -export FFLAGS="-O3 -fp-model precise -g -r8 -i4" +export FFLAGS="-O3 -fp-model precise -g -traceback -r8 -i4" # for debugging #export FFLAGS="-g -r8 -i4 -warn unused -check bounds" diff --git a/sorc/supvit.fd/makefile b/sorc/supvit.fd/makefile index f0c9e20864..d6aae911d8 100644 --- a/sorc/supvit.fd/makefile +++ b/sorc/supvit.fd/makefile @@ -7,7 +7,7 @@ LDFLAGS= ##ccs FFLAGS= -O -qflttrap=ov:zero:inv:enable -qcheck -qextchk -qwarn64 -qintsize=$(ISIZE) -qrealsize=$(RSIZE) # FFLAGS= -O2 -check bounds -check format -xHost -fpe0 # DEBUG= -check bounds -check format -FFLAGS= -O2 -g -i$(ISIZE) -r$(RSIZE) +FFLAGS= -O2 -g -traceback -i$(ISIZE) -r$(RSIZE) supvit: supvit_main.f supvit_modules.o @echo " " diff --git a/util/sorc/mkgfsawps.fd/makefile.wcoss2 b/util/sorc/mkgfsawps.fd/makefile.wcoss2 index 33aa2b3c37..b24f3aece0 100755 --- a/util/sorc/mkgfsawps.fd/makefile.wcoss2 +++ b/util/sorc/mkgfsawps.fd/makefile.wcoss2 @@ -26,7 +26,7 @@ PROFLIB = -lprof # To compile with flowtracing turned on, use the second line # To compile giving profile additonal information, use the third line # WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = -O3 -g -convert big_endian -r8 -i4 -assume noold_ldout_format +FFLAGS = -O3 
-g -traceback -convert big_endian -r8 -i4 -assume noold_ldout_format # Lines from here on down should not need to be changed. They are the # actual rules which make uses to build a.out. diff --git a/util/sorc/overgridid.fd/makefile b/util/sorc/overgridid.fd/makefile index 9acbe31420..9187afa8f2 100755 --- a/util/sorc/overgridid.fd/makefile +++ b/util/sorc/overgridid.fd/makefile @@ -1,7 +1,7 @@ LIBS = ${W3NCO_LIB4} ${BACIO_LIB4} OBJS= overgridid.o overgridid: overgridid.f - ftn -o overgridid overgridid.f $(LIBS) + ftn -g -traceback -o overgridid overgridid.f $(LIBS) clean: -rm -f $(OBJS) diff --git a/util/sorc/rdbfmsua.fd/makefile.wcoss2 b/util/sorc/rdbfmsua.fd/makefile.wcoss2 index 6b26f801a7..fc03f717ea 100755 --- a/util/sorc/rdbfmsua.fd/makefile.wcoss2 +++ b/util/sorc/rdbfmsua.fd/makefile.wcoss2 @@ -41,7 +41,7 @@ OBJS= rdbfmsua.o FC = ftn # FFLAGS = -O3 -q32 -I${GEMINC} -I${NAWIPS}/os/${NA_OS}/include # FFLAGS = -I${GEMINC} -I${NAWIPS}/os/${NA_OS}/include -FFLAGS = -I${GEMINC} -I${OS_INC} +FFLAGS = -g -traceback -I${GEMINC} -I${OS_INC} # LDFLAGS = -O3 -q32 -s # LDFLAGS = -Wl,-Map,MAPFILE diff --git a/util/sorc/webtitle.fd/makefile b/util/sorc/webtitle.fd/makefile index f0114a5f15..f6594b41cc 100755 --- a/util/sorc/webtitle.fd/makefile +++ b/util/sorc/webtitle.fd/makefile @@ -16,7 +16,7 @@ FC = ftn LIBS= ${W3NCO_LIB4} CMD = webtitle -FFLAGS = +FFLAGS = -g -traceback #FFLAGS = -debug # Lines from here on down should not need to be changed. 
They are the From 819b088c7637b4be1e08e2007f9cf42ae671b5a0 Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Mon, 16 May 2022 18:25:10 +0000 Subject: [PATCH 26/33] Updated error handling in gfs_bufr script Bo Cui updated gfs_bufr.sh to improve error handling Refs: #399, #790 --- ush/gfs_bufr.sh | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/ush/gfs_bufr.sh b/ush/gfs_bufr.sh index 7897869dc8..7b8c219315 100755 --- a/ush/gfs_bufr.sh +++ b/ush/gfs_bufr.sh @@ -112,4 +112,13 @@ ln -sf ${STNLIST:-$PARMbufrsnd/bufr_stalist.meteo.gfs} fort.8 ln -sf $PARMbufrsnd/bufr_ij13km.txt fort.7 ${APRUN_POSTSND} $EXECbufrsnd/gfs_bufr < gfsparm > out_gfs_bufr_$FEND -export err=$?;err_chk + +export err=$? + +if [ $err -ne 0 ]; then + msg="GFS postsnd job error, Please check files " + echo $msg + echo $COMIN/${RUN}.${cycle}.atmf${hh2}.${atmfm} + echo $COMIN/${RUN}.${cycle}.sfcf${hh2}.${atmfm} + err_chk +fi From 46ba40b9a601570c0c8bc804798ad726060487cc Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Mon, 16 May 2022 18:31:16 +0000 Subject: [PATCH 27/33] Resource updates for analysis_calc job on WCOSS2 - remove hyper=true in jgdas_atmos_analysis_calc.ecf - add export nth_echgres=$nth_echgres_gfs when CDUMP=gfs in config.analcalc; for correct thread setting at runtime - add export nth_echgres=4 to analcalc block in config.resources - add export nth_echgres_gfs=12 to analcalc block in config.resources Refs: #399 --- ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf | 1 - parm/config/config.analcalc | 4 ++++ parm/config/config.resources.emc.dyn | 2 ++ parm/config/config.resources.nco.static | 2 ++ 4 files changed, 8 insertions(+), 1 deletion(-) diff --git a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf index a2e1124ad3..fa1e87a0eb 100755 --- a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf +++ 
b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf @@ -6,7 +6,6 @@ #PBS -l walltime=00:10:00 #PBS -l select=1:mpiprocs=128:ompthreads=1:ncpus=128 #PBS -l place=vscatter:exclhost -#PBS -l hyper=true #PBS -l debug=true model=gfs diff --git a/parm/config/config.analcalc b/parm/config/config.analcalc index 5866ce5ac6..b908f80af9 100755 --- a/parm/config/config.analcalc +++ b/parm/config/config.analcalc @@ -8,4 +8,8 @@ echo "BEGIN: config.analcalc" # Get task specific resources . $EXPDIR/config.resources analcalc +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + echo "END: config.analcalc" diff --git a/parm/config/config.resources.emc.dyn b/parm/config/config.resources.emc.dyn index 05e3bd3f53..6775e9fe78 100755 --- a/parm/config/config.resources.emc.dyn +++ b/parm/config/config.resources.emc.dyn @@ -152,6 +152,8 @@ elif [ $step = "analcalc" ]; then export npe_analcalc=127 export ntasks=$npe_analcalc export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 export npe_node_analcalc=$npe_node_max if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export npe_analcalc=127 ; fi diff --git a/parm/config/config.resources.nco.static b/parm/config/config.resources.nco.static index 5e694f069d..3d08f3315d 100755 --- a/parm/config/config.resources.nco.static +++ b/parm/config/config.resources.nco.static @@ -130,6 +130,8 @@ elif [ $step = "analcalc" ]; then export npe_analcalc=127 export ntasks=$npe_analcalc export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 export npe_node_analcalc=$npe_node_max elif [ $step = "analdiag" ]; then From 506eb6419f32f613562e61fca61f7800ac2875bd Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Mon, 16 May 2022 18:32:17 +0000 Subject: [PATCH 28/33] Update EMC tag name in v16.2.0 release notes Hand-off tag to NCO is now EMC-v16.2.0.7 Refs: #399 --- docs/Release_Notes.gfs.v16.2.0.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/Release_Notes.gfs.v16.2.0.md 
b/docs/Release_Notes.gfs.v16.2.0.md index b54a5e10f3..3a36b47a79 100644 --- a/docs/Release_Notes.gfs.v16.2.0.md +++ b/docs/Release_Notes.gfs.v16.2.0.md @@ -15,7 +15,7 @@ The NOAA VLab and both the NOAA-EMC and NCAR organization spaces on GitHub.com a cd $PACKAGEROOT mkdir gfs.v16.2.0 cd gfs.v16.2.0 -git clone -b EMC-v16.2.0.6 https://github.com/NOAA-EMC/global-workflow.git . +git clone -b EMC-v16.2.0.7 https://github.com/NOAA-EMC/global-workflow.git . cd sorc ./checkout.sh -o ``` From 141883f7707712c923abdaead1e6409608734e94 Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Thu, 19 May 2022 13:04:32 +0000 Subject: [PATCH 29/33] Update to HOMENHC default path in JGLOBAL_ATMOS_TROPCY_QC_RELOC - NCO updated the default path for HOMENHC and tested it in prod on WCOSS2 during NHC test Refs: #399 --- jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC b/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC index c9fd91bcd7..931ec31782 100755 --- a/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC +++ b/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC @@ -74,7 +74,7 @@ export ARCHSYND=${ROTDIR}/syndat # this location is unique, do not change if [ ! -d ${ARCHSYND} ]; then mkdir -p $ARCHSYND; fi export HOMENHCp1=${HOMENHCp1:-/gpfs/?p1/nhc/save/guidance/storm-data/ncep} -export HOMENHC=${HOMENHC:-/gpfs/dell2/nhc/save/guidance/storm-data/ncep} +export HOMENHC=${HOMENHC:-/lfs/h1/ops/prod/dcom/nhc/atcf/ncep} # JY export TANK_TROPCY=${TANK_TROPCY:-${DCOMROOT}/${envir}} # path to tropical cyclone record database export TANK_TROPCY=${TANK_TROPCY:-${DCOMROOT}} # path to tropical cyclone record database From 85bbde8bdd9ab081c77c0c1281da2a959addd180 Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Thu, 19 May 2022 16:00:03 +0000 Subject: [PATCH 30/33] Memory and resource adjustments to some jobs (NCO) - Based on testing on Dogwood after some WCOSS2 updates some memory and resource adjustments were made by NCO. 
- Memory updates to the gempak, awips, and fbwnd job ecf scripts. - Resource adjustments to remedy oversubscription errors in the post and postsnd jobs. Refs: #399 --- ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf | 2 +- ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf | 2 +- .../post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf | 2 +- .../post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf | 2 +- .../atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf | 2 +- .../gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf index b6c9454318..039ca56852 100755 --- a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf +++ b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf @@ -4,7 +4,7 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:15:00 -#PBS -l select=1:ncpus=2:mpiprocs=2:mem=1GB +#PBS -l select=1:ncpus=2:mpiprocs=2:mem=4GB #PBS -l place=vscatter #PBS -l debug=true diff --git a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf index 3fc5d5bb7d..3423d8d4e4 100755 --- a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf +++ b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf @@ -4,7 +4,7 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:12:00 -#PBS -l select=1:mpiprocs=112:ompthreads=1:ncpus=112 +#PBS -l select=1:mpiprocs=126:ompthreads=1:ncpus=126 #PBS -l place=vscatter:exclhost #PBS -l debug=true diff --git a/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf b/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf index 41bcf316ce..cb1ddbe70c 100755 --- a/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf +++ 
b/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf @@ -4,7 +4,7 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:10:00 -#PBS -l select=1:ncpus=1:mem=1GB +#PBS -l select=1:ncpus=1:mem=3GB #PBS -l place=vscatter #PBS -l debug=true diff --git a/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf b/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf index aca8e529e8..b45add609c 100755 --- a/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf @@ -4,7 +4,7 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:05:00 -#PBS -l select=1:ncpus=1:mem=1GB +#PBS -l select=1:ncpus=1:mem=3GB #PBS -l place=vscatter #PBS -l debug=true diff --git a/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf b/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf index 4acbbe2b12..451a1903da 100755 --- a/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf @@ -4,7 +4,7 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=02:00:00 -#PBS -l select=2:mpiprocs=20:ompthreads=1:ncpus=20 +#PBS -l select=2:mpiprocs=120:ompthreads=1:ncpus=120 #PBS -l place=vscatter:exclhost #PBS -l debug=true diff --git a/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf b/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf index 7c246cb192..9c930f1426 100755 --- a/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf @@ -4,7 +4,7 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:05:00 -#PBS -l select=1:ncpus=1:mem=1GB +#PBS -l select=1:ncpus=1:mem=2GB #PBS -l place=vscatter #PBS -l debug=true From 
5a154ed40924ef4d63ba5924f43a2207ce83aa16 Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Thu, 19 May 2022 16:26:19 +0000 Subject: [PATCH 31/33] Update gfspostsnd job resources - oversubscribing The gfspostsnd job was oversubscribing CPUs on WCOSS2 after updates on Dogwood. Updating resources settings to get them matching and working. Refs: #399 --- .../atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf | 2 +- env/WCOSS2.env | 2 +- parm/config/config.resources.emc.dyn | 2 +- parm/config/config.resources.nco.static | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf b/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf index 451a1903da..3b4e64f876 100755 --- a/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf @@ -4,7 +4,7 @@ #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=02:00:00 -#PBS -l select=2:mpiprocs=120:ompthreads=1:ncpus=120 +#PBS -l select=2:mpiprocs=20:ompthreads=6:ncpus=120 #PBS -l place=vscatter:exclhost #PBS -l debug=true diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 3adfa5a84b..9d14cfdb64 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -182,7 +182,7 @@ elif [ $step = "fv3ic" ]; then elif [ $step = "postsnd" ]; then export NTHREADS_POSTSND=${nth_postsnd:-1} - export APRUN_POSTSND="$launcher -n $npe_postsnd --depth=6 --cpu-bind depth" + export APRUN_POSTSND="$launcher -n $npe_postsnd --depth=$NTHREADS_POSTSND --cpu-bind depth" export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1} export APRUN_POSTSNDCFP="$launcher -np $npe_postsndcfp $mpmd" diff --git a/parm/config/config.resources.emc.dyn b/parm/config/config.resources.emc.dyn index 6775e9fe78..b9c1492825 100755 --- a/parm/config/config.resources.emc.dyn +++ b/parm/config/config.resources.emc.dyn @@ -381,7 +381,7 @@ elif [ $step = "postsnd" ]; then export 
wtime_postsnd="02:00:00" export npe_postsnd=40 - export nth_postsnd=1 + export nth_postsnd=6 export npe_node_postsnd=20 export npe_postsndcfp=9 export npe_node_postsndcfp=1 diff --git a/parm/config/config.resources.nco.static b/parm/config/config.resources.nco.static index 3d08f3315d..78b03f2e89 100755 --- a/parm/config/config.resources.nco.static +++ b/parm/config/config.resources.nco.static @@ -318,7 +318,7 @@ elif [ $step = "postsnd" ]; then export wtime_postsnd="02:00:00" export npe_postsnd=40 - export nth_postsnd=1 + export nth_postsnd=6 export npe_node_postsnd=20 export npe_postsndcfp=9 export npe_node_postsndcfp=1 From 37f88a71edc3a2795f7692d5e0216316150f39a9 Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Thu, 19 May 2022 21:14:28 +0000 Subject: [PATCH 32/33] Update prior GFS version in v16.2.0 release notes Refs: #665, #798 --- docs/Release_Notes.gfs.v16.2.0.md | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/Release_Notes.gfs.v16.2.0.md b/docs/Release_Notes.gfs.v16.2.0.md index 3a36b47a79..994282903e 100644 --- a/docs/Release_Notes.gfs.v16.2.0.md +++ b/docs/Release_Notes.gfs.v16.2.0.md @@ -118,7 +118,7 @@ All components updated their codes to build on WCOSS2: FIX CHANGES ----------- -* No changes from GFS v16.1.7 +* No changes from GFS v16.1.8 PARM/CONFIG CHANGES ------------------- @@ -354,7 +354,7 @@ CHANGES TO RESOURCES AND FILE SIZES ----------------------------------- * File sizes - * No change to GFSv16.1.7. + * No change to GFSv16.1.8. * Resource changes to meet operational time windows: * See updated Ecflow scripts for adjusted compute resources for WCOSS2. * Pre-hand-off development testing results: @@ -379,21 +379,21 @@ DISSEMINATION INFORMATION ------------------------- * Where should this output be sent? - * No change from GFS v16.1.7 + * No change from GFS v16.1.8 * Who are the users? 
- * No change from GFS v16.1.7 + * No change from GFS v16.1.8 * Which output files should be transferred from PROD WCOSS to DEV WCOSS? - * No change from GFS v16.1.7 + * No change from GFS v16.1.8 * Directory changes - * No change from GFS v16.1.7 + * No change from GFS v16.1.8 * File changes - * No change from GFS v16.1.7 + * No change from GFS v16.1.8 HPSS ARCHIVE ------------ -* No change from GFS v16.1.7 +* No change from GFS v16.1.8 JOB DEPENDENCIES AND FLOW DIAGRAM --------------------------------- -* No change from GFS v16.1.7 +* No change from GFS v16.1.8 From a9d64d323caa415d70b030292448fbb409b254d0 Mon Sep 17 00:00:00 2001 From: "kate.friedman" Date: Fri, 20 May 2022 13:03:03 +0000 Subject: [PATCH 33/33] Matching memory updates for awips/gempak in config - Add updated memory values for awips and gempak jobs into resource configs to match similar updates in ecf scripts Refs: #399 --- parm/config/config.resources.emc.dyn | 4 ++-- parm/config/config.resources.nco.static | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/parm/config/config.resources.emc.dyn b/parm/config/config.resources.emc.dyn index b9c1492825..1c5886af70 100755 --- a/parm/config/config.resources.emc.dyn +++ b/parm/config/config.resources.emc.dyn @@ -397,7 +397,7 @@ elif [ $step = "awips" ]; then export npe_awips=1 export npe_node_awips=1 export nth_awips=1 - export memory_awips="1GB" + export memory_awips="3GB" if [[ "$machine" == "WCOSS_DELL_P3" ]]; then export npe_awips=2 export npe_node_awips=2 @@ -412,7 +412,7 @@ elif [ $step = "gempak" ]; then export npe_node_gempak=2 export npe_node_gempak_gfs=28 export nth_gempak=1 - export memory_gempak="1GB" + export memory_gempak="4GB" export memory_gempak_gfs="2GB" else diff --git a/parm/config/config.resources.nco.static b/parm/config/config.resources.nco.static index 78b03f2e89..5d175a8e62 100755 --- a/parm/config/config.resources.nco.static +++ b/parm/config/config.resources.nco.static @@ -329,7 +329,7 @@ elif [ $step = 
"awips" ]; then export npe_awips=1 export npe_node_awips=1 export nth_awips=1 - export memory_awips="1GB" + export memory_awips="3GB" elif [ $step = "gempak" ]; then @@ -339,7 +339,7 @@ elif [ $step = "gempak" ]; then export npe_node_gempak=2 export npe_node_gempak_gfs=28 export nth_gempak=1 - export memory_gempak="1GB" + export memory_gempak="4GB" export memory_gempak_gfs="2GB" else