From 740daba8d6d34a327199701c1df7d6e10da73ec5 Mon Sep 17 00:00:00 2001 From: Kate Friedman Date: Fri, 21 Apr 2023 03:11:51 -0400 Subject: [PATCH 1/5] Create fix file issue template (#1495) Create fix_file.md template file for new fix file request issue. This should help formalize the process and document updates. New issues will auto-assign to @KateFriedman-NOAA and @WalterKolczynski-NOAA (the developers with access to make fix file changes). Fixes #1492 --- .github/ISSUE_TEMPLATE/fix_file.md | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/fix_file.md diff --git a/.github/ISSUE_TEMPLATE/fix_file.md b/.github/ISSUE_TEMPLATE/fix_file.md new file mode 100644 index 0000000000..1e05f0c9df --- /dev/null +++ b/.github/ISSUE_TEMPLATE/fix_file.md @@ -0,0 +1,24 @@ +--- +name: Fix File Update +about: Use this template for adding, updating, or removing fix files from the global dataset +title: +labels: Fix Files +assignees: + - KateFriedman-NOAA + - WalterKolczynski-NOAA + +--- + +**Description** + + + + + + +**Tasks** + +- [ ] Discuss needs with the global-workflow developer assigned to the request. +- [ ] Add/update/remove fix file(s) in fix sets on supported platforms (global-workflow assignee task). +- [ ] Update "Fix File Management" spreadsheet (https://docs.google.com/spreadsheets/d/1BeIvcz6TO3If4YCqkUK-oz_kGS9q2wTjwLS-BBemSEY/edit?usp=sharing). +- [ ] Make related workflow/component updates. From fb236523140b09686a4c2961e0552e7bd5dbf04f Mon Sep 17 00:00:00 2001 From: Guillaume Vernieres Date: Fri, 21 Apr 2023 12:40:11 -0400 Subject: [PATCH 2/5] Add new task to post-process marine DA (#1485) The work in this PR is only meant to bring us closer to a viable WCDA system. The refactoring of the marine DA to the new standard introduced by @aerorahul and used by @RussTreadon-NOAA and @CoryMartin-NOAA will be addressed after this [Epic](https://github.com/noaa-emc/gdasapp/issues/416) is resolved. ### Motivation and context This work adds a separate j-job `JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT` that calls a script that will be in the GDASApp for the time being (PR to come once this is merged) and does the following: - prepares the `SOCA` increment for `MOM6` IAU - recursively applies the `SOCA2CICE` change of variable, a mapping from the 2D sea-ice analysis variable to the CICE6 dynamical and thermodynamic variables - merges the `Tref` increment from the `NSST` analysis with the `SOCA` increment ### Summary of the change - HPC environment: the new j-job runs a `JEDI` executable twice and one python script. All are serial jobs, but the JEDI executable needs to be invoked as an MPI job with 1 PE. - `jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT`: this j-job points to an ex-script that is not in the GDASApp develop branch yet. - Addition of the option to merge the Tref NSST increment with the MOM6 increment. This is triggered with the `DO_MERGENSST` switch. - The new j-job dependency was added, with the option to wait for the surface analysis file `sfcanl.nc` if `DO_MERGENSST` is true. Refs: #1480.
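For orientation, here is a minimal sketch of what that ex-script might look like. `APRUN_OCNANAL`, `JEDI_BIN`, and `DO_MERGENSST` are set by the env/config changes in this PR; the executable, YAML, and helper names are placeholders, since the real `exgdas_global_marine_analysis_chkpt.sh` lives in the GDASApp rather than in this repository:

```bash
#!/bin/bash
# Hypothetical outline of the chkpt ex-script (the real one is in GDASApp).
# APRUN_OCNANAL, JEDI_BIN, and DO_MERGENSST come from this PR's env/config
# files; the executable, YAML, and helper script names are placeholders.

# Apply the SOCA2CICE change of variable: logically a serial job, but the
# JEDI executable still has to be launched as a 1-PE MPI job.
${APRUN_OCNANAL} "${JEDI_BIN}/soca2cice.x" "${DATA}/soca2cice.yaml"
export err=$?; err_chk

# Optionally fold the Tref increment from the NSST analysis into the
# SOCA/MOM6 increment before it is handed to the IAU (placeholder paths).
if [[ "${DO_MERGENSST}" = "YES" ]]; then
  python3 "${HOMEgfs}/ush/merge_tref_inc.py" "${COMOUT}/${APREFIX}sfcanl.nc" "${DATA}/ocn_inc.nc"
fi
```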
Fixes NOAA-EMC/GDASApp#418 --- env/HERA.env | 10 +++++ env/ORION.env | 10 +++++ jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT | 52 ++++++++++++++++++++++++++ parm/config/config.base.emc.dyn | 1 + parm/config/config.ocnanal | 4 +- parm/config/config.ocnanalchkpt | 11 ++++++ parm/config/config.resources | 21 +++++++++++ ush/forecast_postdet.sh | 7 +++- workflow/applications.py | 6 ++- workflow/rocoto/workflow_tasks.py | 24 +++++++++++- 10 files changed, 139 insertions(+), 7 deletions(-) create mode 100755 jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT create mode 100644 parm/config/config.ocnanalchkpt diff --git a/env/HERA.env b/env/HERA.env index 7960f604ba..c59ba72298 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -96,6 +96,16 @@ elif [[ "${step}" = "ocnanalrun" ]]; then [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun}" +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd --multi-prog" + + nth_max=$((npe_node_max / npe_node_ocnanalchkpt)) + + export NTHREADS_OCNANAL=${nth_ocnanalchkpt:-${nth_max}} + [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalchkpt}" + elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then export MKL_NUM_THREADS=4 diff --git a/env/ORION.env b/env/ORION.env index 1ef58c82bc..8911b70d29 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -98,6 +98,16 @@ elif [[ "${step}" = "ocnanalrun" ]]; then [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun}" +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + + nth_max=$((npe_node_max / npe_node_ocnanalchkpt)) + + export NTHREADS_OCNANAL=${nth_ocnanalchkpt:-${nth_max}} + [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalchkpt}" + elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then export MKL_NUM_THREADS=4 diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT new file mode 100755 index 0000000000..f4b8712576 --- /dev/null +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT @@ -0,0 +1,52 @@ +#!/bin/bash +export STRICT="NO" +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" +source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalchkpt" -c "base ocnanal ocnanalchkpt" + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +export GDATE +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP=${GDUMP:-"gdas"} + +export GPREFIX="${GDUMP}.t${gcyc}z." +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +export APREFIX="${CDUMP}.t${cyc}z." + +export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ocean} + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_chkpt.sh} +${EXSCRIPT} +status=$?
+[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Do not remove the Temporary working directory (do this in POST) +########################################## +cd "${DATAROOT}" || exit 1 + +exit 0 diff --git a/parm/config/config.base.emc.dyn b/parm/config/config.base.emc.dyn index c58c740ca8..bfc68adf7f 100644 --- a/parm/config/config.base.emc.dyn +++ b/parm/config/config.base.emc.dyn @@ -311,6 +311,7 @@ export imp_physics=@IMP_PHYSICS@ export DO_JEDIVAR="NO" export DO_JEDIENS="NO" export DO_JEDIOCNVAR="NO" +export DO_MERGENSST="NO" # Hybrid related export DOHYBVAR="@DOHYBVAR@" diff --git a/parm/config/config.ocnanal b/parm/config/config.ocnanal index c8d821b86d..36519c7f35 100644 --- a/parm/config/config.ocnanal +++ b/parm/config/config.ocnanal @@ -1,7 +1,7 @@ #!/bin/bash ########## config.ocnanal ########## -# configuration common to all atm analysis tasks +# configuration common to all ocean analysis tasks echo "BEGIN: config.ocnanal" @@ -15,7 +15,7 @@ export SOCA_VARS=tocn,socn,ssh export SABER_BLOCKS_YAML=@SABER_BLOCKS_YAML@ export SOCA_NINNER=@SOCA_NINNER@ export CASE_ANL=@CASE_ANL@ -export DOMAIN_STACK_SIZE=2000000 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin # R2D2 diff --git a/parm/config/config.ocnanalchkpt b/parm/config/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/parm/config/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/parm/config/config.resources b/parm/config/config.resources index b1ca57e5cb..dbf9cc63e3 100644 --- a/parm/config/config.resources +++ b/parm/config/config.resources @@ -19,6 +19,8 @@ if [[ $# -ne 1 ]]; then echo "wavegempak waveawipsbulls waveawipsgridded" echo "postsnd awips gempak" echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost" + exit 1 fi @@ -327,6 +329,25 @@ elif [[ "${step}" = "ocnanalrun" ]]; then npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) export npe_node_ocnanalrun +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported" + exit 1 + esac + elif [[ "${step}" = "ocnanalpost" ]]; then export wtime_ocnanalpost="00:30:00" diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index f80a7440cf..13485bbd1e 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -1001,7 +1001,12 @@ CICE_postdet() { # Copy/link CICE IC to DATA if [[ "${warm_start}" = ".true."
]]; then - $NLN "${ROTDIR}/${CDUMP}.${gPDY}/${gcyc}/ice/RESTART/${PDY}.${cyc}0000.cice_model.res.nc" "${DATA}/cice_model.res.nc" + cice_ana="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ice/RESTART/${PDY}.${cyc}0000.cice_model_anl.res.nc" + if [[ -e ${cice_ana} ]]; then + ${NLN} "${cice_ana}" "${DATA}/cice_model.res.nc" + else + ${NLN} "${ROTDIR}/${CDUMP}.${gPDY}/${gcyc}/ice/RESTART/${PDY}.${cyc}0000.cice_model.res.nc" "${DATA}/cice_model.res.nc" + fi else # cold start are typically SIS2 restarts obtained from somewhere else e.g. CPC $NLN "${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ice/RESTART/${PDY}.${cyc}0000.cice_model.res.nc" "${DATA}/cice_model.res.nc" fi diff --git a/workflow/applications.py b/workflow/applications.py index 82ef576ed9..fc6dbd6d05 100644 --- a/workflow/applications.py +++ b/workflow/applications.py @@ -110,6 +110,7 @@ def __init__(self, conf: Configuration) -> None: self.do_jediatmvar = _base.get('DO_JEDIVAR', False) self.do_jediens = _base.get('DO_JEDIENS', False) self.do_jediocnvar = _base.get('DO_JEDIOCNVAR', False) + self.do_mergensst = _base.get('DO_MERGENSST', False) self.do_hpssarch = _base.get('HPSSARCH', False) @@ -183,7 +184,7 @@ def _cycled_configs(self): configs += ['anal', 'analdiag'] if self.do_jediocnvar: - configs += ['ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalpost'] + configs += ['ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalchkpt', 'ocnanalpost'] if self.do_ocean: configs += ['ocnpost'] @@ -360,7 +361,8 @@ def _get_cycled_task_names(self): gdas_gfs_common_tasks_before_fcst += ['anal'] if self.do_jediocnvar: - gdas_gfs_common_tasks_before_fcst += ['ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalpost'] + gdas_gfs_common_tasks_before_fcst += ['ocnanalprep', 'ocnanalbmat', 'ocnanalrun', + 'ocnanalchkpt', 'ocnanalpost'] gdas_gfs_common_tasks_before_fcst += ['sfcanl', 'analcalc'] diff --git a/workflow/rocoto/workflow_tasks.py b/workflow/rocoto/workflow_tasks.py index 2cc50cde86..3142605363 100644 --- a/workflow/rocoto/workflow_tasks.py +++ b/workflow/rocoto/workflow_tasks.py @@ -13,7 +13,7 @@ class Tasks: VALID_TASKS = ['aerosol_init', 'coupled_ic', 'getic', 'init', 'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'gldas', 'arch', 'atmanlinit', 'atmanlrun', 'atmanlfinal', - 'ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalpost', + 'ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalchkpt', 'ocnanalpost', 'earc', 'ecen', 'echgres', 'ediag', 'efcs', 'eobs', 'eomg', 'epos', 'esfc', 'eupd', 'atmensanalprep', 'atmensanalrun', 'atmensanalpost', @@ -533,11 +533,31 @@ def ocnanalrun(self): return task - def ocnanalpost(self): + def ocnanalchkpt(self): deps = [] dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalrun'} deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_mergensst: + data = f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.sfcanl.nc' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('ocnanalchkpt') + task = create_wf_task('ocnanalchkpt', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def ocnanalpost(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalchkpt'} + deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) resources = self.get_resource('ocnanalpost') From 7db70496063fe32928cacb9790e45a1e987a3510 Mon Sep 17 00:00:00 2001 From: 
David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> Date: Fri, 21 Apr 2023 12:42:00 -0400 Subject: [PATCH 3/5] Added Fit2Obs to S4. #1489 (#1497) Adds Fit2Obs support for S4 by adding the module use/load commands to the module_base.s4.lua modulefile. Fixes #1489. --- modulefiles/module_base.s4.lua | 3 +++ 1 file changed, 3 insertions(+) diff --git a/modulefiles/module_base.s4.lua b/modulefiles/module_base.s4.lua index 62170bbeb6..35c00d83c6 100644 --- a/modulefiles/module_base.s4.lua +++ b/modulefiles/module_base.s4.lua @@ -31,4 +31,7 @@ setenv("WGRIB2","wgrib2") prepend_path("MODULEPATH", pathJoin("/data/prod/glopara/git/prepobs/v1.0.1/modulefiles")) load(pathJoin("prepobs", "1.0.1")) +prepend_path("MODULEPATH", pathJoin("/data/prod/glopara/git/Fit2Obs/v1.0.0/modulefiles")) +load(pathJoin("fit2obs", "1.0.0")) + whatis("Description: GFS run environment") From 587e469a1be5e278326fc0cbceefedc90caf75bf Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA <26926959+RussTreadon-NOAA@users.noreply.github.com> Date: Fri, 21 Apr 2023 13:28:11 -0400 Subject: [PATCH 4/5] Refactor UFS-DA ATM ens component to use python g-w (#1373) This PR contains UFS-DA ATM ens changes originally in PR #1354. Below is a list of changes in this PR: - rename UFS-DA ATM ens jobs atmensanalprep, atmensanalrun, and atmensanalpost to atmensanlinit, atmensanlrun, and atmensanlfinal, respectively - replace UFS-DA ATM ens shell scripts with python scripts - rename UFS-DA ATM ens j-jobs consistent with initialize, run, and finalize functions. Update j-jobs to execute python scripts instead of shell scripts - rename UFS-DA ATM ens rocoto jobs to be consistent with initialize, run, and finalize functions. Update jobs to set python paths and execute renamed j-jobs - update rocoto workflow generation to new names for UFS-DA ATM ens jobs - update UFS-DA ATM ens job names in machine-dependent env files to new job names - rename UFS-DA ATM ens configuration files consistent with the change in job names - add python class for UFS-DA ATM ens analysis - unify JEDIEXE link for UFS-DA Aerosol, ATM, and ENS - properly set `cycledefs` for `gfsatmanlinit` - remove unused `FV3JEDI_FIX` from atmanl and atmensanl config The above changes are part of a larger g-w effort to transition from shell scripts to python. UFS-DA Aerosol was the first GDASApp system to be converted. PR #1372 converted UFS-DA atmospheric variational DA to the python-based approach. This PR converts UFS-DA atmospheric local ensemble DA to the python-based approach.
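Schematically, each renamed job follows the same three-layer chain as the aerosol and variational refactors; a condensed sketch of the pattern (the actual files appear in the diff below):

```bash
#! /usr/bin/env bash
# Condensed sketch of the rocoto wrapper -> j-job -> python ex-script chain
# used by the renamed atmensanl{init,run,final} jobs; see the diff below
# for the real files.
export job="atmensanlrun"
export PYTHONPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src"

# The thin rocoto wrapper (jobs/rocoto/atmensanlrun.sh) executes the j-job...
"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN"

# ...which ends by running a python ex-script that instantiates the new
# AtmEnsAnalysis task class and calls the matching method, e.g.
# exglobal_atmens_analysis_run.py -> AtmEnsAnalysis(config).execute()
```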
Fixes #1313 Depends (in part) on #1370 and #1372 and NOAA-EMC/GDASApp#388 --- env/CONTAINER.env | 2 +- env/HERA.env | 14 +- env/JET.env | 16 +- env/ORION.env | 16 +- env/S4.env | 16 +- env/WCOSS2.env | 16 +- jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST | 66 ---- jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP | 66 ---- jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN | 66 ---- jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE | 50 +++ jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE | 43 +++ jobs/JGLOBAL_ATMENS_ANALYSIS_RUN | 38 +++ jobs/rocoto/atmensanalpost.sh | 20 -- jobs/rocoto/atmensanalprep.sh | 20 -- jobs/rocoto/atmensanalrun.sh | 20 -- jobs/rocoto/atmensanlfinal.sh | 23 ++ jobs/rocoto/atmensanlinit.sh | 24 ++ jobs/rocoto/atmensanlrun.sh | 24 ++ parm/config/config.aeroanl | 2 +- parm/config/config.atmanl | 3 +- parm/config/config.atmensanal | 24 -- parm/config/config.atmensanalpost | 10 - parm/config/config.atmensanalprep | 10 - parm/config/config.atmensanalrun | 14 - parm/config/config.atmensanl | 22 ++ parm/config/config.atmensanlfinal | 10 + parm/config/config.atmensanlinit | 10 + parm/config/config.atmensanlrun | 11 + parm/config/config.resources | 54 +-- scripts/exgdas_global_atmos_ensanal_post.py | 44 --- scripts/exgdas_global_atmos_ensanal_run.sh | 167 ---------- scripts/exglobal_atmens_analysis_finalize.py | 25 ++ .../exglobal_atmens_analysis_initialize.py | 25 ++ scripts/exglobal_atmens_analysis_run.py | 23 ++ ush/python/pygfs/task/aero_analysis.py | 9 - ush/python/pygfs/task/analysis.py | 30 +- ush/python/pygfs/task/atm_analysis.py | 9 - ush/python/pygfs/task/atmens_analysis.py | 312 ++++++++++++++++++ workflow/applications.py | 10 +- workflow/rocoto/workflow_tasks.py | 60 ++-- 40 files changed, 761 insertions(+), 663 deletions(-) delete mode 100755 jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST delete mode 100755 jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP delete mode 100755 jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN create mode 100755 jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE create mode 100755 jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE create mode 100755 jobs/JGLOBAL_ATMENS_ANALYSIS_RUN delete mode 100755 jobs/rocoto/atmensanalpost.sh delete mode 100755 jobs/rocoto/atmensanalprep.sh delete mode 100755 jobs/rocoto/atmensanalrun.sh create mode 100755 jobs/rocoto/atmensanlfinal.sh create mode 100755 jobs/rocoto/atmensanlinit.sh create mode 100755 jobs/rocoto/atmensanlrun.sh delete mode 100644 parm/config/config.atmensanal delete mode 100644 parm/config/config.atmensanalpost delete mode 100644 parm/config/config.atmensanalprep delete mode 100644 parm/config/config.atmensanalrun create mode 100755 parm/config/config.atmensanl create mode 100755 parm/config/config.atmensanlfinal create mode 100755 parm/config/config.atmensanlinit create mode 100755 parm/config/config.atmensanlrun delete mode 100755 scripts/exgdas_global_atmos_ensanal_post.py delete mode 100755 scripts/exgdas_global_atmos_ensanal_run.sh create mode 100755 scripts/exglobal_atmens_analysis_finalize.py create mode 100755 scripts/exglobal_atmens_analysis_initialize.py create mode 100755 scripts/exglobal_atmens_analysis_run.py create mode 100644 ush/python/pygfs/task/atmens_analysis.py diff --git a/env/CONTAINER.env b/env/CONTAINER.env index d06ca91a9b..378b046944 100755 --- a/env/CONTAINER.env +++ b/env/CONTAINER.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanlrun atmensanalrun aeroanlrun" + echo "atmanlrun atmensanlrun aeroanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" diff --git a/env/HERA.env b/env/HERA.env index c59ba72298..f97af13d95 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" echo "argument can be any one of the following:" - echo "atmanlrun atmensanalrun aeroanlrun" + echo "atmanlrun atmensanlrun aeroanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -56,15 +56,13 @@ elif [[ "${step}" = "atmanlrun" ]]; then [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" -elif [[ "${step}" = "atmensanalrun" ]]; then +elif [[ "${step}" = "atmensanlrun" ]]; then - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_atmensanalrun)) + nth_max=$((npe_node_max / npe_node_atmensanlrun)) - export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANAL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANAL=${nth_max} - export APRUN_ATMENSANAL="${launcher} -n ${npe_atmensanalrun}" + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} + [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" elif [[ "${step}" = "aeroanlrun" ]]; then diff --git a/env/JET.env b/env/JET.env index e5fd85aaa3..1632e1cc0e 100755 --- a/env/JET.env +++ b/env/JET.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" echo "argument can be any one of the following:" - echo "atmanlrun atmensanalrun aeroanlrun" + echo "atmanlrun atmensanlrun aeroanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -55,17 +55,13 @@ elif [[ "${step}" = "atmanlrun" ]]; then [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" -elif [[ "${step}" = "atmensanalrun" ]]; then +elif [[ "${step}" = "atmensanlrun" ]]; then - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_atmensanalrun)) + nth_max=$((npe_node_max / npe_node_atmensanlrun)) - export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANAL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANAL=${nth_max} - export APRUN_ATMENSANAL="${launcher} -n ${npe_atmensanalrun}" + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} + [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} ${npe_atmensanlrun}" elif [[ "${step}" = "aeroanlrun" ]]; then diff --git a/env/ORION.env b/env/ORION.env index 8911b70d29..43450bffcf 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanlrun atmensanalrun aeroanlrun" + echo "atmanlrun atmensanlrun aeroanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -56,17 +56,13 @@ elif [[ "${step}" = "atmanlrun" ]]; then [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" -elif [[ "${step}" = "atmensanalrun" ]]; then +elif [[ "${step}" = "atmensanlrun" ]]; then - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_atmensanalrun)) + nth_max=$((npe_node_max / npe_node_atmensanlrun)) - export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANAL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANAL=${nth_max} - export APRUN_ATMENSANAL="${launcher} -n ${npe_atmensanalrun}" + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} + [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" elif [[ "${step}" = "aeroanlrun" ]]; then diff --git a/env/S4.env b/env/S4.env index 9a62d515a4..c69a845cdf 100755 --- a/env/S4.env +++ b/env/S4.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" echo "argument can be any one of the following:" - echo "atmanlrun atmensanalrun aeroanlrun" + echo "atmanlrun atmensanlrun aeroanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" @@ -54,17 +54,13 @@ elif [[ "${step}" = "atmanlrun" ]]; then [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" -elif [[ "${step}" = "atmensanalrun" ]]; then +elif [[ "${step}" = "atmensanlrun" ]]; then - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_atmensanalrun)) + nth_max=$((npe_node_max / npe_node_atmensanlrun)) - export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANAL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANAL=${nth_max} - export APRUN_ATMENSANAL="${launcher} -n ${npe_atmensanalrun}" + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} + [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" elif [[ "${step}" = "aeroanlrun" ]]; then diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 8110b94b5b..95c3f72fe4 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -4,7 +4,7 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanlrun atmensanalrun aeroanlrun" + echo "atmanlrun atmensanlrun aeroanlrun" echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen esfc efcs epos" echo "postsnd awips gempak" @@ -43,17 +43,13 @@ elif [[ "${step}" = "atmanlrun" ]]; then [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" -elif [[ "${step}" = "atmensanalrun" ]]; then +elif [[ "${step}" = "atmensanlrun" ]]; then - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="${launcher} -np \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_atmensanalrun)) + nth_max=$((npe_node_max / npe_node_atmensanlrun)) - export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-${nth_max}} - [[ ${NTHREADS_ATMENSANAL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANAL=${nth_max} - export APRUN_ATMENSANAL="${launcher} -n ${npe_atmensanalrun}" + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} + [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" elif [[ "${step}" = "aeroanlrun" ]]; then diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST deleted file mode 100755 index e1d53b552e..0000000000 --- a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/bash - -source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanalpost" -c "base atmensanal atmensanalpost" - - -############################################## -# Set variables used in the script -############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" - -############################################## -# Begin JOB SPECIFIC work -############################################## - -export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") -gPDY=${GDATE:0:8} -export gcyc=${GDATE:8:2} -export GDUMP=${GDUMP:-"gdas"} - -export OPREFIX="${CDUMP}.t${cyc}z." -export GPREFIX="${GDUMP}.t${gcyc}z." -export APREFIX="${CDUMP}.t${cyc}z." - -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT_ENS=${COMOUT_ENS:-${ROTDIR}/enkf${CDUMP}.${PDY}/${cyc}/${COMPONENT}} - -mkdir -p ${COMOUT} -mkdir -p ${COMOUT_ENS} - -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" -export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" - -# Add UFSDA to PYTHONPATH -export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/:${PYTHONPATH} - -############################################################### -# Run relevant script - -EXSCRIPT=${GDASPOSTPY:-${HOMEgfs}/scripts/exgdas_global_atmos_ensanal_post.py} -${EXSCRIPT} -status=$? 
-[[ ${status} -ne 0 ]] && exit ${status} - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [ -e "${pgmout}" ] ; then - cat ${pgmout} -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd ${DATAROOT} -[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} - -exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP deleted file mode 100755 index 7b3ecee7ca..0000000000 --- a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/bash - -source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanalprep" -c "base atmensanal atmensanalprep" - - -############################################## -# Set variables used in the script -############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" - -############################################## -# Begin JOB SPECIFIC work -############################################## - -export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") -gPDY=${GDATE:0:8} -export gcyc=${GDATE:8:2} -export GDUMP=${GDUMP:-"gdas"} - -export OPREFIX="${CDUMP}.t${cyc}z." -export GPREFIX="${GDUMP}.t${gcyc}z." -export APREFIX="${CDUMP}.t${cyc}z." - -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT_ENS=${COMOUT_ENS:-${ROTDIR}/enkf${CDUMP}.${PDY}/${cyc}/${COMPONENT}} - -mkdir -p ${COMOUT} -mkdir -p ${COMOUT_ENS} - -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" -export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" - -# Add UFSDA to PYTHONPATH -export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/:${PYTHONPATH} - -############################################################### -# Run relevant script - -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/scripts/exgdas_global_atmos_analysis_prep.py} -${EXSCRIPT} -status=$? 
-[[ ${status} -ne 0 ]] && exit ${status} - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [ -e "${pgmout}" ] ; then - cat ${pgmout} -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd ${DATAROOT} -[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} - -exit 0 diff --git a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN b/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN deleted file mode 100755 index 45368d51ff..0000000000 --- a/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/bash - -source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanalrun" -c "base atmensanal atmensanalrun" - - -############################################## -# Set variables used in the script -############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT="atmos" - -############################################## -# Begin JOB SPECIFIC work -############################################## - -export GDATE=$(date +%Y%m%d%H -d "${CDATE:0:8} ${CDATE:8:2} - ${assim_freq} hours") -gPDY=${GDATE:0:8} -export gcyc=${GDATE:8:2} -export GDUMP=${GDUMP:-"gdas"} - -export OPREFIX="${CDUMP}.t${cyc}z." -export GPREFIX="${GDUMP}.t${gcyc}z." -export APREFIX="${CDUMP}.t${cyc}z." - -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT_ENS=${COMOUT_ENS:-${ROTDIR}/enkf${CDUMP}.${PDY}/${cyc}/${COMPONENT}} - -mkdir -p ${COMOUT} -mkdir -p ${COMOUT_ENS} - -# COMIN_GES and COMIN_GES_ENS are used in script -export COMIN="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" -export COMIN_GES="${ROTDIR}/${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" -export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${gPDY}/${gcyc}/${COMPONENT}" - -# Add UFSDA to PYTHONPATH -export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/:${PYTHONPATH} - -############################################################### -# Run relevant script - -EXSCRIPT=${GDASRUNSH:-${HOMEgfs}/scripts/exgdas_global_atmos_ensanal_run.sh} -${EXSCRIPT} -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [ -e "${pgmout}" ] ; then - cat ${pgmout} -fi - -########################################## -# Remove the Temporary working directory -########################################## -cd ${DATAROOT} -[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} - -exit 0 diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE b/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE new file mode 100755 index 0000000000..d40d79cf78 --- /dev/null +++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE @@ -0,0 +1,50 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlfinal" -c "base atmensanl atmensanlfinal" + +############################################## +# Set variables used in the script +############################################## +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +GDUMP="gdas" + +############################################## +# Begin JOB SPECIFIC work +############################################## + +export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}} +mkdir -p "${COMOUT}" + +# COMIN_GES and COMIN_GES_ENS are used in script +export COMIN_GES="${ROTDIR}/${GDUMP}.${GDATE:0:8}/${GDATE:8:2}/atmos" +export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${GDATE:0:8}/${GDATE:8:2}" + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMENSFINALPY:-${HOMEgfs}/scripts/exglobal_atmens_analysis_finalize.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || ( echo "FATAL ERROR: ${DATAROOT} does not exist, ABORT!"; exit 1 ) +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE new file mode 100755 index 0000000000..dca7d0ffc6 --- /dev/null +++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlinit" -c "base atmensanl atmensanlinit" + +############################################## +# Set variables used in the script +############################################## +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +GDUMP="gdas" + + +############################################## +# Begin JOB SPECIFIC work +############################################## +export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}} +mkdir -p "${COMOUT}" + +# COMIN_GES and COMIN_GES_ENS are used in script +export COMIN_GES="${ROTDIR}/${GDUMP}.${GDATE:0:8}/${GDATE:8:2}/atmos" +export COMIN_GES_ENS="${ROTDIR}/enkf${GDUMP}.${GDATE:0:8}/${GDATE:8:2}" + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMENSINITPY:-${HOMEgfs}/scripts/exglobal_atmens_analysis_initialize.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN b/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN new file mode 100755 index 0000000000..5a267f197a --- /dev/null +++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN @@ -0,0 +1,38 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlrun" -c "base atmensanl atmensanlrun" + +############################################## +# Set variables used in the script +############################################## + +############################################## +# Begin JOB SPECIFIC work +############################################## + +export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}} +mkdir -p "${COMOUT}" + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMENSRUNSH:-${HOMEgfs}/scripts/exglobal_atmens_analysis_run.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/rocoto/atmensanalpost.sh b/jobs/rocoto/atmensanalpost.sh deleted file mode 100755 index 91ac2d6212..0000000000 --- a/jobs/rocoto/atmensanalpost.sh +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env bash - -export STRICT="NO" -source "${HOMEgfs}/ush/preamble.sh" -export STRICT="YES" - -############################################################### -# Source UFSDA workflow modules -. ${HOMEgfs}/ush/load_ufsda_modules.sh -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -export job="atmensanalpost" -export jobid="${job}.$$" - -############################################################### -# Execute the JJOB -${HOMEgfs}/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_POST -status=$? -exit ${status} diff --git a/jobs/rocoto/atmensanalprep.sh b/jobs/rocoto/atmensanalprep.sh deleted file mode 100755 index b54a1b464e..0000000000 --- a/jobs/rocoto/atmensanalprep.sh +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env bash - -export STRICT="NO" -source "${HOMEgfs}/ush/preamble.sh" -export STRICT="YES" - -############################################################### -# Source UFSDA workflow modules -. ${HOMEgfs}/ush/load_ufsda_modules.sh -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -export job="atmensanalprep" -export jobid="${job}.$$" - -############################################################### -# Execute the JJOB -${HOMEgfs}/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_PREP -status=$? -exit ${status} diff --git a/jobs/rocoto/atmensanalrun.sh b/jobs/rocoto/atmensanalrun.sh deleted file mode 100755 index a2509a310e..0000000000 --- a/jobs/rocoto/atmensanalrun.sh +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env bash - -export STRICT="NO" -source "${HOMEgfs}/ush/preamble.sh" -export STRICT="YES" - -############################################################### -# Source UFSDA workflow modules -. ${HOMEgfs}/ush/load_ufsda_modules.sh -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -export job="atmensanalrun" -export jobid="${job}.$$" - -############################################################### -# Execute the JJOB -${HOMEgfs}/jobs/JGDAS_GLOBAL_ATMOS_ENSANAL_RUN -status=$? -exit ${status} diff --git a/jobs/rocoto/atmensanlfinal.sh b/jobs/rocoto/atmensanlfinal.sh new file mode 100755 index 0000000000..838e9712f8 --- /dev/null +++ b/jobs/rocoto/atmensanlfinal.sh @@ -0,0 +1,23 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmensanlfinal" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE" +status=$? +exit "${status}" diff --git a/jobs/rocoto/atmensanlinit.sh b/jobs/rocoto/atmensanlinit.sh new file mode 100755 index 0000000000..0ab78a1083 --- /dev/null +++ b/jobs/rocoto/atmensanlinit.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmensanlinit" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE" +status=$? +exit "${status}" diff --git a/jobs/rocoto/atmensanlrun.sh b/jobs/rocoto/atmensanlrun.sh new file mode 100755 index 0000000000..91efdb3768 --- /dev/null +++ b/jobs/rocoto/atmensanlrun.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmensanlrun" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN" +status=$? 
+exit "${status}" diff --git a/parm/config/config.aeroanl b/parm/config/config.aeroanl index 3b9a9971f4..41d63f8549 100644 --- a/parm/config/config.aeroanl +++ b/parm/config/config.aeroanl @@ -18,7 +18,7 @@ export BERROR_DATE="20160630.000000" export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ -export JEDIVAREXE=${HOMEgfs}/exec/fv3jedi_var.x +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x export crtm_VERSION="2.3.0" echo "END: config.aeroanl" diff --git a/parm/config/config.atmanl b/parm/config/config.atmanl index 719018d1fd..c0cd9e6733 100644 --- a/parm/config/config.atmanl +++ b/parm/config/config.atmanl @@ -10,7 +10,6 @@ export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yam export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml export STATICB_TYPE="gsibec" export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml -export FV3JEDI_FIX=${HOMEgfs}/fix/gdas export INTERP_METHOD='barycentric' export layout_x=1 @@ -19,7 +18,7 @@ export layout_y=1 export io_layout_x=1 export io_layout_y=1 -export JEDIVAREXE=${HOMEgfs}/exec/fv3jedi_var.x +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x export crtm_VERSION="2.3.0" echo "END: config.atmanl" diff --git a/parm/config/config.atmensanal b/parm/config/config.atmensanal deleted file mode 100644 index 2c939f0d84..0000000000 --- a/parm/config/config.atmensanal +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash -x - -########## config.atmensanal ########## -# configuration common to all atm atmensanal analysis tasks - -echo "BEGIN: config.atmensanal" - -export OBS_YAML_DIR=$HOMEgfs/sorc/gdas.cd/parm/atm/obs/config/ -export OBS_LIST=$HOMEgfs/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml -export BERROR_YAML=$HOMEgfs/sorc/gdas.cd/parm/atm/berror/hybvar_bump.yaml -export ATMENSYAML=$HOMEgfs/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml -export FV3JEDI_FIX=$HOMEgfs/fix/gdas -export R2D2_OBS_DB='ufsda_test' -export R2D2_OBS_DUMP='oper_gdas' -export R2D2_OBS_SRC='ncdiag' -export R2D2_BC_SRC='gsi' -export R2D2_BC_DUMP='oper_gdas' -export R2D2_ARCH_DB='local' -export INTERP_METHOD='barycentric' - -export io_layout_x=1 # hardwired to 1,1 in yamltools.py -export io_layout_y=1 - -echo "END: config.atmensanal" diff --git a/parm/config/config.atmensanalpost b/parm/config/config.atmensanalpost deleted file mode 100644 index f79ee5b507..0000000000 --- a/parm/config/config.atmensanalpost +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -x - -########## config.atmensanalpost ########## -# Post Atm Analysis specific - -echo "BEGIN: config.atmensanalpost" - -# Get task specific resources -. $EXPDIR/config.resources atmensanalpost -echo "END: config.atmensanalpost" diff --git a/parm/config/config.atmensanalprep b/parm/config/config.atmensanalprep deleted file mode 100644 index b719b9ac6c..0000000000 --- a/parm/config/config.atmensanalprep +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -x - -########## config.atmensanalprep ########## -# Pre Atm Analysis specific - -echo "BEGIN: config.atmensanalprep" - -# Get task specific resources -. $EXPDIR/config.resources atmensanalprep -echo "END: config.atmensanalprep" diff --git a/parm/config/config.atmensanalrun b/parm/config/config.atmensanalrun deleted file mode 100644 index aeb59d1805..0000000000 --- a/parm/config/config.atmensanalrun +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/bash -x - -########## config.atmensanalrun ########## -# Atm LETKFs specific - -echo "BEGIN: config.atmensanalrun" - -# Get task specific resources -. 
$EXPDIR/config.resources atmensanalrun - -# Task specific variables -export JEDIENSEXE=$HOMEgfs/exec/fv3jedi_letkf.x - -echo "END: config.atmensanalrun" diff --git a/parm/config/config.atmensanl b/parm/config/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/parm/config/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/parm/config/config.atmensanlfinal b/parm/config/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/parm/config/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/parm/config/config.atmensanlinit b/parm/config/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/parm/config/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/parm/config/config.atmensanlrun b/parm/config/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/parm/config/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/parm/config/config.resources b/parm/config/config.resources index dbf9cc63e3..96666cc60d 100644 --- a/parm/config/config.resources +++ b/parm/config/config.resources @@ -10,7 +10,7 @@ if [[ $# -ne 1 ]]; then echo "argument can be any one of the following:" echo "getic init coupled_ic aerosol_init" echo "atmanlinit atmanlrun atmanlfinal" - echo "atmensanalprep atmensanalrun atmensanalpost" + echo "atmensanlinit atmensanlrun atmensanlfinal" echo "aeroanlinit aeroanlrun aeroanlfinal" echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" echo "eobs ediag eomg eupd ecen esfc efcs epos earc" @@ -740,39 +740,39 @@ elif [[ ${step} = "coupled_ic" ]]; then export nth_coupled_ic=1 export is_exclusive=True -elif [[ ${step} = "atmensanalprep" ]]; then +elif [[ "${step}" = "atmensanlinit" ]]; then - export wtime_atmensanalprep="00:10:00" - export npe_atmensanalprep=1 - export nth_atmensanalprep=1 - npe_node_atmensanalprep=$(echo "${npe_node_max} / ${nth_atmensanalprep}" | bc) - export npe_node_atmensanalprep - export is_exclusive=True + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" -elif [[ ${step} = "atmensanalrun" ]]; then +elif [[ "${step}" = "atmensanlrun" ]]; then # make below case dependent later - export layout_x=2 - export layout_y=3 - - export wtime_atmensanalrun="00:30:00" - npe_atmensanalrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmensanalrun - npe_atmensanalrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmensanalrun_gfs - export nth_atmensanalrun=1 - export nth_atmensanalrun_gfs=${nth_atmensanalrun} + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun export is_exclusive=True - npe_node_atmensanalrun=$(echo "${npe_node_max} / ${nth_atmensanalrun}" | bc) - export npe_node_atmensanalrun -elif [[ ${step} = "atmensanalpost" ]]; then +elif [[ "${step}" = "atmensanlfinal" ]]; then - export wtime_atmensanalpost="00:30:00" - export npe_atmensanalpost=${npe_node_max} - export nth_atmensanalpost=1 - npe_node_atmensanalpost=$(echo "${npe_node_max} / ${nth_atmensanalpost}" | bc) - export npe_node_atmensanalpost + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal export is_exclusive=True elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then diff --git a/scripts/exgdas_global_atmos_ensanal_post.py b/scripts/exgdas_global_atmos_ensanal_post.py deleted file mode 100755 index 6c5384953f..0000000000 --- a/scripts/exgdas_global_atmos_ensanal_post.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python3 -################################################################################ -# UNIX Script Documentation Block -# . . 
-# Script name: exgdas_global_atmos_analysis_post.py -# Script description: Post atmospheric analysis script. -# -# Author: Cory Martin Org: NCEP/EMC Date: 2021-12-29 -# -# Abstract: This script runs after the atmospheric analysis and -# archives each diagnostic file into the R2D2 local user database. -# -# $Id$ -# -# Attributes: -# Language: Python3 -# -################################################################################ - -# import os and sys to add ush to path -import logging -import os -import sys - -# set up logger -logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') - -# get absolute path of ush/ directory either from env or relative to this file -my_dir = os.path.dirname(__file__) -my_home = os.path.dirname(os.path.dirname(my_dir)) -gdas_home = os.path.join(os.environ['HOMEgfs'], 'sorc', 'gdas.cd') -sys.path.append(os.path.join(os.getenv('HOMEgfs', my_home), 'ush')) -logging.info(f"sys.path={sys.path}") - -# import UFSDA utilities -import ufsda - -# get configuration based on environment variables -config = ufsda.misc_utils.get_env_config(component='atm') -config['DIAG_DIR'] = os.path.join(os.environ['COMOUT_ENS'], 'diags') -config['provider'] = 'ncdiag_lgetkf' - -# use R2D2 to archive hofx files -ufsda.archive.atm_diags(config) diff --git a/scripts/exgdas_global_atmos_ensanal_run.sh b/scripts/exgdas_global_atmos_ensanal_run.sh deleted file mode 100755 index 2e87573eda..0000000000 --- a/scripts/exgdas_global_atmos_ensanal_run.sh +++ /dev/null @@ -1,167 +0,0 @@ -#!/bin/bash -################################################################################ -#### UNIX Script Documentation Block -# . . -# Script name: exgdas_global_atmos_analysis_run.sh -# Script description: Runs the global atmospheric analysis with FV3-JEDI -# -# Author: Cory Martin Org: NCEP/EMC Date: 2021-12-28 -# -# Abstract: This script makes a global model atmospheric analysis using FV3-JEDI -# and also (for now) updates increment files using a python ush utility -# -# $Id$ -# -# Attributes: -# Language: POSIX shell -# Machine: Orion -# -################################################################################ - -# Set environment. 
-source "$HOMEgfs/ush/preamble.sh" - -# Directories -pwd=$(pwd) - -# Utilities -export NLN=${NLN:-"/bin/ln -sf"} -export INCPY=${INCPY:-"$HOMEgfs/sorc/gdas.cd/ush/jediinc2fv3.py"} -export GENYAML=${GENYAML:-"$HOMEgfs/sorc/gdas.cd/ush/genYAML"} -export GETOBSYAML=${GETOBSYAML:-"$HOMEgfs/sorc/gdas.cd/ush/get_obs_list.py"} - -################################################################################ -# make subdirectories -mkdir -p $DATA/fv3jedi -mkdir -p $DATA/obs -mkdir -p $DATA/diags -mkdir -p $DATA/bc -mkdir -p $DATA/anl - -################################################################################ -# generate YAML file -cat > $DATA/temp.yaml << EOF -template: ${ATMENSYAML} -output: $DATA/fv3jedi_ens.yaml -config: - atm: true - BERROR_YAML: $BERROR_YAML - OBS_DIR: obs - DIAG_DIR: diags - CRTM_COEFF_DIR: crtm - BIAS_IN_DIR: obs - BIAS_OUT_DIR: bc - OBS_PREFIX: $OPREFIX - BIAS_PREFIX: $GPREFIX - OBS_LIST: $OBS_LIST - OBS_YAML_DIR: $OBS_YAML_DIR - BKG_DIR: bkg - fv3jedi_staticb_dir: berror - fv3jedi_fix_dir: fv3jedi - fv3jedi_fieldset_dir: fv3jedi - fv3jedi_fieldmetadata_dir: fv3jedi - OBS_DATE: '$CDATE' - BIAS_DATE: '$GDATE' - ANL_DIR: anl/ - NMEM_ENKF: '$NMEM_ENKF' - INTERP_METHOD: '$INTERP_METHOD' -EOF -$GENYAML --config $DATA/temp.yaml - -################################################################################ -# link observations to $DATA -$GETOBSYAML --config $DATA/fv3jedi_ens.yaml --output $DATA/${OPREFIX}obsspace_list -files=$(cat $DATA/${OPREFIX}obsspace_list) -for file in $files; do - basefile=$(basename $file) - $NLN $COMIN/$basefile $DATA/obs/$basefile -done - -# link backgrounds to $DATA -# linking FMS RESTART files for now -# change to (or make optional) for cube sphere history later -##$NLN ${COMIN_GES}/RESTART $DATA/bkg - - -# Link ensemble backgrounds to $DATA. Make directories -# for ensemble output -if [ $DOHYBVAR = "YES" -o $DO_JEDIENS = "YES" ]; then - mkdir -p $DATA/bkg - for imem in $(seq 1 $NMEM_ENKF); do - memchar="mem"$(printf %03i $imem) - mkdir -p $DATA/bkg/$memchar - $NLN ${COMIN_GES_ENS}/$memchar/RESTART $DATA/bkg/$memchar - mkdir -p $DATA/anl/$memchar - done -fi - -################################################################################ -# link fix files to $DATA -# static B -##CASE_BERROR=${CASE_BERROR:-${CASE_ANL:-$CASE}} -##$NLN $FV3JEDI_FIX/bump/$CASE_BERROR/ $DATA/berror - -# vertical coordinate -LAYERS=$(expr $LEVS - 1) -$NLN $FV3JEDI_FIX/fv3jedi/fv3files/akbk${LAYERS}.nc4 $DATA/fv3jedi/akbk.nc4 - -# other FV3-JEDI fix files -$NLN $FV3JEDI_FIX/fv3jedi/fv3files/fmsmpp.nml $DATA/fv3jedi/fmsmpp.nml -$NLN $FV3JEDI_FIX/fv3jedi/fv3files/field_table_gfdl $DATA/fv3jedi/field_table - -# fieldmetadata -$NLN $FV3JEDI_FIX/fv3jedi/fieldmetadata/gfs-restart.yaml $DATA/fv3jedi/gfs-restart.yaml - -# fieldsets -fieldsets="dynamics.yaml ufo.yaml" -for fieldset in $fieldsets; do - $NLN $FV3JEDI_FIX/fv3jedi/fieldsets/$fieldset $DATA/fv3jedi/$fieldset -done - -# CRTM coeffs -${NLN} "${FV3JEDI_FIX}/crtm/2.3.0" "${DATA}/crtm" - -# Link executable to $DATA -$NLN $JEDIENSEXE $DATA/fv3jedi_ens.x - -################################################################################ -# run executable -export OMP_NUM_THREADS=$NTHREADS_ATMENSANAL -export pgm=$JEDIENSEXE -. 
prep_step
-$APRUN_ATMENSANAL $DATA/fv3jedi_ens.x $DATA/fv3jedi_ens.yaml 1>&1 2>&2
-export err=$?; err_chk
-
-################################################################################
-# translate FV3-JEDI increment to FV3 readable format
-for imem in $(seq 1 $NMEM_ENKF); do
-    memchar="mem"$(printf %03i $imem)
-    atmges_fv3=$COMIN_GES_ENS/$memchar/${GPREFIX}atmf006.nc
-    atminc_jedi=$DATA/anl/$memchar/atminc.${PDY}_${cyc}0000z.nc4
-    atminc_fv3=$COMOUT_ENS/$memchar/${CDUMP}.${cycle}.atminc.nc
-    mkdir -p $COMOUT_ENS/$memchar
-    if [ -s $atminc_jedi ]; then
-        $INCPY $atmges_fv3 $atminc_jedi $atminc_fv3
-        export err=$?
-    else
-        echo "***WARNING*** missing $atminc_jedi ABORT"
-        export err=99
-    fi
-    err_chk
-done
-
-################################################################################
-# Create log file noting creation of analysis increment file
-echo "$CDUMP $CDATE atminc done at $(date)" > $COMOUT_ENS/${CDUMP}.${cycle}.loginc.txt
-
-################################################################################
-# Copy diags and YAML to $COMOUT
-cp -r ${DATA}/fv3jedi_ens.yaml ${COMOUT_ENS}/${CDUMP}.${cycle}.fv3jedi_ens.yaml
-cp -rf "${DATA}/diags" "${COMOUT_ENS}/"
-
-
-################################################################################
-
-exit ${err}
-
-################################################################################
diff --git a/scripts/exglobal_atmens_analysis_finalize.py b/scripts/exglobal_atmens_analysis_finalize.py
new file mode 100755
index 0000000000..5271c5c486
--- /dev/null
+++ b/scripts/exglobal_atmens_analysis_finalize.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python3
+# exglobal_atmens_analysis_finalize.py
+# This script creates an AtmEnsAnalysis class
+# and runs the finalize method,
+# which performs post-processing and cleanup activities
+# for a global atm local ensemble analysis
+import os
+
+from pygw.logger import Logger
+from pygw.configuration import cast_strdict_as_dtypedict
+from pygfs.task.atmens_analysis import AtmEnsAnalysis
+
+
+# Initialize root logger
+logger = Logger(level='DEBUG', colored_log=True)
+
+
+if __name__ == '__main__':
+
+    # Take configuration from environment and cast it as python dictionary
+    config = cast_strdict_as_dtypedict(os.environ)
+
+    # Instantiate the atmens analysis task
+    AtmEnsAnl = AtmEnsAnalysis(config)
+    AtmEnsAnl.finalize()
diff --git a/scripts/exglobal_atmens_analysis_initialize.py b/scripts/exglobal_atmens_analysis_initialize.py
new file mode 100755
index 0000000000..97326ddf3d
--- /dev/null
+++ b/scripts/exglobal_atmens_analysis_initialize.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python3
+# exglobal_atmens_analysis_initialize.py
+# This script creates an AtmEnsAnalysis class
+# and runs the initialize method,
+# which creates and stages the runtime directory
+# and creates the YAML configuration
+# for a global atm local ensemble analysis
+import os
+
+from pygw.logger import Logger
+from pygw.configuration import cast_strdict_as_dtypedict
+from pygfs.task.atmens_analysis import AtmEnsAnalysis
+
+# Initialize root logger
+logger = Logger(level='DEBUG', colored_log=True)
+
+
+if __name__ == '__main__':
+
+    # Take configuration from environment and cast it as python dictionary
+    config = cast_strdict_as_dtypedict(os.environ)
+
+    # Instantiate the atmens analysis task
+    AtmEnsAnl = AtmEnsAnalysis(config)
+    AtmEnsAnl.initialize()
diff --git a/scripts/exglobal_atmens_analysis_run.py b/scripts/exglobal_atmens_analysis_run.py
new file mode 100755
index 0000000000..2de95e850d
--- /dev/null
+++
b/scripts/exglobal_atmens_analysis_run.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python3
+# exglobal_atmens_analysis_run.py
+# This script creates an AtmEnsAnalysis object
+# and runs the execute method,
+# which executes the global atm local ensemble analysis
+import os
+
+from pygw.logger import Logger
+from pygw.configuration import cast_strdict_as_dtypedict
+from pygfs.task.atmens_analysis import AtmEnsAnalysis
+
+# Initialize root logger
+logger = Logger(level='DEBUG', colored_log=True)
+
+
+if __name__ == '__main__':
+
+    # Take configuration from environment and cast it as python dictionary
+    config = cast_strdict_as_dtypedict(os.environ)
+
+    # Instantiate the atmens analysis task
+    AtmEnsAnl = AtmEnsAnalysis(config)
+    AtmEnsAnl.execute()
diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py
index e21284dc91..8c692e22d6 100644
--- a/ush/python/pygfs/task/aero_analysis.py
+++ b/ush/python/pygfs/task/aero_analysis.py
@@ -68,7 +68,6 @@ def initialize(self: Analysis) -> None:
         - staging B error files
         - staging model backgrounds
         - generating a YAML file for the JEDI executable
-        - linking the JEDI executable (TODO make it copyable, requires JEDI fix)
         - creating output directories
         """
         super().initialize()
@@ -99,14 +98,6 @@ def initialize(self: Analysis) -> None:
         save_as_yaml(varda_yaml, self.task_config.fv3jedi_yaml)
         logger.info(f"Wrote variational YAML to: {self.task_config.fv3jedi_yaml}")
 
-        # link executable to DATA/ directory
-        exe_src = self.task_config['JEDIVAREXE']
-        logger.debug(f"Link executable {exe_src} to DATA/")  # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec.
-        exe_dest = os.path.join(self.task_config['DATA'], os.path.basename(exe_src))
-        if os.path.exists(exe_dest):
-            rm_p(exe_dest)
-        os.symlink(exe_src, exe_dest)
-
         # need output dir for diags and anl
         logger.debug("Create empty output [anl, diags] directories to receive output from executable")
         newdirs = [
diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py
index 6eeeb34996..94a93b74f4 100644
--- a/ush/python/pygfs/task/analysis.py
+++ b/ush/python/pygfs/task/analysis.py
@@ -5,7 +5,7 @@
 from netCDF4 import Dataset
 from typing import List, Dict, Any
 
-from pygw.yaml_file import YAMLFile, parse_j2yaml
+from pygw.yaml_file import YAMLFile, parse_j2yaml, parse_yamltmpl
 from pygw.file_utils import FileHandler
 from pygw.template import Template, TemplateConstants
 from pygw.logger import logit
@@ -36,6 +36,9 @@ def initialize(self) -> None:
             bias_dict = self.get_bias_dict()
             FileHandler(bias_dict).sync()
 
+        # link jedi executable to run directory
+        self.link_jediexe()
+
     @logit(logger)
     def get_obs_dict(self: Task) -> Dict[str, Any]:
         """Compile a dictionary of observation files to copy
@@ -171,3 +174,28 @@ def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]:
         """
         berror_dict = {'foo': 'bar'}
         return berror_dict
+
+    @logit(logger)
+    def link_jediexe(self: Task) -> None:
+        """Link the JEDI executable into the run directory
+
+        This method links a JEDI executable to the run directory
+
+        Parameters
+        ----------
+        Task: GDAS task
+
+        Returns
+        ----------
+        None
+        """
+        exe_src = self.task_config.JEDIEXE
+
+        # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec.
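+        # NOTE (hedged sketch, not current behavior): once JEDI can run a
+        # relocated binary, the symlink below could become a copy, e.g.
+        # shutil.copy2(exe_src, exe_dest), which would satisfy the EE2
+        # restriction mentioned in the TODO above.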
+ logger.debug(f"Link executable {exe_src} to DATA/") + exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + return diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index a632e318d9..045839edfd 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -67,7 +67,6 @@ def initialize(self: Analysis) -> None: - staging B error files - staging model backgrounds - generating a YAML file for the JEDI executable - - linking the JEDI executable (TODO make it copyable, requires JEDI fix) - creating output directories """ super().initialize() @@ -98,14 +97,6 @@ def initialize(self: Analysis) -> None: save_as_yaml(varda_yaml, self.task_config.fv3jedi_yaml) logger.info(f"Wrote variational YAML to: {self.task_config.fv3jedi_yaml}") - # link executable to DATA/ directory - exe_src = self.task_config.JEDIVAREXE - logger.debug(f"Link executable {exe_src} to DATA/") # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec. - exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) - if os.path.exists(exe_dest): - rm_p(exe_dest) - os.symlink(exe_src, exe_dest) - # need output dir for diags and anl logger.debug("Create empty output [anl, diags] directories to receive output from executable") newdirs = [ diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py new file mode 100644 index 0000000000..636129d3ee --- /dev/null +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -0,0 +1,312 @@ +#!/usr/bin/env python3 + +import os +import glob +import gzip +import tarfile +from logging import getLogger +from typing import Dict, List, Any + +from pygw.attrdict import AttrDict +from pygw.file_utils import FileHandler +from pygw.timetools import add_to_datetime, to_fv3time, to_timedelta, to_YMDH +from pygw.fsutils import rm_p, chdir +from pygw.yaml_file import parse_yamltmpl, parse_j2yaml, save_as_yaml +from pygw.logger import logit +from pygw.executable import Executable +from pygw.exceptions import WorkflowException +from pygfs.task.analysis import Analysis + +logger = getLogger(__name__.split('.')[-1]) + + +class AtmEnsAnalysis(Analysis): + """ + Class for global atmens analysis tasks + """ + @logit(logger, name="AtmEnsAnalysis") + def __init__(self, config): + super().__init__(config) + + _res = int(self.config.CASE_ENKF[1:]) + _res_anl = int(self.config.CASE_ANL[1:]) + _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2) + _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmens.yaml") + + # Create a local dictionary that is repeatedly used across this class + local_dict = AttrDict( + { + 'npx_ges': _res + 1, + 'npy_ges': _res + 1, + 'npz_ges': self.config.LEVS - 1, + 'npz': self.config.LEVS - 1, + 'npx_anl': _res_anl + 1, + 'npy_anl': _res_anl + 1, + 'npz_anl': self.config.LEVS - 1, + 'ATM_WINDOW_BEGIN': _window_begin, + 'ATM_WINDOW_LENGTH': f"PT{self.config.assim_freq}H", + 'comin_ges_atm': self.config.COMIN_GES, + 'comin_ges_atmens': self.config.COMIN_GES_ENS, + 'OPREFIX': f"{self.config.EUPD_CYC}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN + 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN + 'GPREFIX': 
f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", + 'fv3jedi_yaml': _fv3jedi_yaml, + } + ) + + # task_config is everything that this task should need + self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + + @logit(logger) + def initialize(self: Analysis) -> None: + """Initialize a global atmens analysis + + This method will initialize a global atmens analysis using JEDI. + This includes: + - staging CRTM fix files + - staging FV3-JEDI fix files + - staging model backgrounds + - generating a YAML file for the JEDI executable + - creating output directories + + Parameters + ---------- + Analysis: parent class for GDAS task + + Returns + ---------- + None + """ + super().initialize() + + # stage CRTM fix files + crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'parm_gdas', 'atm_crtm_coeff.yaml') + logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") + crtm_fix_list = parse_yamltmpl(crtm_fix_list_path, self.task_config) + FileHandler(crtm_fix_list).sync() + + # stage fix files + jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'parm_gdas', 'atm_jedi_fix.yaml') + logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") + jedi_fix_list = parse_yamltmpl(jedi_fix_list_path, self.task_config) + FileHandler(jedi_fix_list).sync() + + # stage backgrounds + FileHandler(self.get_bkg_dict(AttrDict(self.task_config))).sync() + + # generate ensemble da YAML file + logger.debug(f"Generate ensemble da YAML file: {self.task_config.fv3jedi_yaml}") + ensda_yaml = parse_j2yaml(self.task_config.ATMENSYAML, self.task_config) + save_as_yaml(ensda_yaml, self.task_config.fv3jedi_yaml) + logger.info(f"Wrote ensemble da YAML to: {self.task_config.fv3jedi_yaml}") + + # need output dir for diags and anl + logger.debug("Create empty output [anl, diags] directories to receive output from executable") + newdirs = [ + os.path.join(self.task_config.DATA, 'anl'), + os.path.join(self.task_config.DATA, 'diags'), + ] + FileHandler({'mkdir': newdirs}).sync() + + # Make directories for member analysis files + anldir = [] + for imem in range(1, self.task_config.NMEM_ENKF + 1): + memchar = f"mem{imem:03d}" + anldir.append(os.path.join(self.task_config.DATA, 'anl', f'mem{imem:03d}')) + FileHandler({'mkdir': anldir}).sync() + + @logit(logger) + def execute(self: Analysis) -> None: + """Execute a global atmens analysis + + This method will execute a global atmens analysis using JEDI. + This includes: + - changing to the run directory + - running the global atmens analysis executable + + Parameters + ---------- + Analysis: parent class for GDAS task + + Returns + ---------- + None + """ + chdir(self.task_config.DATA) + + exec_cmd = Executable(self.task_config.APRUN_ATMENSANL) + exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_letkf.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") + + pass + + @logit(logger) + def finalize(self: Analysis) -> None: + """Finalize a global atmens analysis + + This method will finalize a global atmens analysis using JEDI. 
+        This includes:
+        - tar output diag files and place in ROTDIR
+        - copy the generated YAML file from initialize to the ROTDIR
+        - write UFS model readable atm increment file
+
+        Parameters
+        ----------
+        Analysis: parent class for GDAS task
+
+        Returns
+        ----------
+        None
+        """
+        # ---- tar up diags
+        # path of output tar statfile
+        atmensstat = os.path.join(self.task_config.COMOUT, f"{self.task_config.APREFIX}atmensstat")
+
+        # get list of diag files to put in tarball
+        diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc4'))
+
+        logger.info(f"Compressing {len(diags)} diag files to {atmensstat}.gz")
+
+        # gzip the files first
+        logger.debug(f"Gzipping {len(diags)} diag files")
+        for diagfile in diags:
+            with open(diagfile, 'rb') as f_in, gzip.open(f"{diagfile}.gz", 'wb') as f_out:
+                f_out.writelines(f_in)
+
+        # open tar file for writing
+        logger.debug(f"Creating tar file {atmensstat} with {len(diags)} gzipped diag files")
+        with tarfile.open(atmensstat, "w") as archive:
+            for diagfile in diags:
+                diaggzip = f"{diagfile}.gz"
+                archive.add(diaggzip, arcname=os.path.basename(diaggzip))
+
+        # copy full YAML from executable to ROTDIR
+        logger.info(f"Copying {self.task_config.fv3jedi_yaml} to {self.task_config.COMOUT}")
+        src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml")
+        dest = os.path.join(self.task_config.COMOUT, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml")
+        logger.debug(f"Copying {src} to {dest}")
+        yaml_copy = {
+            'mkdir': [self.task_config.COMOUT],
+            'copy': [[src, dest]]
+        }
+        FileHandler(yaml_copy).sync()
+
+        # Create UFS model readable atm increment file from UFS-DA atm increment
+        logger.info("Create UFS model readable atm increment file from UFS-DA atm increment")
+        self.jedi2fv3inc()
+
+    def clean(self):
+        super().clean()
+
+    @logit(logger)
+    def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]:
+        """Compile a dictionary of model background files to copy
+
+        This method constructs a dictionary of FV3 RESTART files (coupler, core, tracer)
+        that are needed for global atmens DA and returns said dictionary for use by the FileHandler class.
+
+        Parameters
+        ----------
+        task_config: Dict
+            a dictionary containing all of the configuration needed for the task
+
+        Returns
+        ----------
+        bkg_dict: Dict
+            a dictionary containing the list of model background files to copy for FileHandler
+        """
+        # NOTE for now this is FV3 RESTART files and just assumed to be fh006
+        # loop over ensemble members
+        dirlist = []
+        bkglist = []
+        for imem in range(1, task_config.NMEM_ENKF + 1):
+            memchar = f"mem{imem:03d}"
+
+            # accumulate directory list for member restart files
+            dirlist.append(os.path.join(task_config.DATA, 'bkg', memchar))
+
+            # get FV3 RESTART files, this will be a lot simpler when using history files
+            rst_dir = os.path.join(task_config.comin_ges_atmens, memchar, 'atmos/RESTART')
+            run_dir = os.path.join(task_config.DATA, 'bkg', memchar)
+
+            # atmens DA needs coupler
+            basename = f'{to_fv3time(task_config.current_cycle)}.coupler.res'
+            bkglist.append([os.path.join(rst_dir, basename), os.path.join(task_config.DATA, 'bkg', memchar, basename)])
+
+            # atmens DA needs core, srf_wnd, tracer, phy_data, sfc_data
+            for ftype in ['fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data']:
+                template = f'{to_fv3time(self.task_config.current_cycle)}.{ftype}.tile{{tilenum}}.nc'
+                for itile in range(1, task_config.ntiles + 1):
+                    basename = template.format(tilenum=itile)
+                    bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)])
+
+        bkg_dict = {
+            'mkdir': dirlist,
+            'copy': bkglist,
+        }
+
+        return bkg_dict
+
+    @logit(logger)
+    def jedi2fv3inc(self: Analysis) -> None:
+        """Generate UFS model readable analysis increment
+
+        This method writes a UFS DA atm increment in UFS model readable format.
+        This includes:
+        - write UFS-DA atm increments using variable names expected by UFS model
+        - compute and write delp increment
+        - compute and write hydrostatic delz increment
+
+        Please note that some of these steps are temporary and will be modified
+        once the model is able to directly read atm increments.
+ + Parameters + ---------- + Analysis: parent class for GDAS task + + Returns + ---------- + None + """ + # Select the atm guess file based on the analysis and background resolutions + # Fields from the atm guess are used to compute the delp and delz increments + cdate = to_fv3time(self.task_config.current_cycle) + cdate_inc = cdate.replace('.', '_') + + # Reference the python script which does the actual work + incpy = os.path.join(self.task_config.HOMEgfs, 'ush/jediinc2fv3.py') + + for imem in range(1, self.task_config.NMEM_ENKF + 1): + memchar = f"mem{imem:03d}" + + # make output directory for member increment + incdir = [ + os.path.join(self.task_config.COMOUT, memchar, 'atmos') + ] + FileHandler({'mkdir': incdir}).sync() + + # rewrite UFS-DA atmens increments + atmges_fv3 = os.path.join(self.task_config.COMIN_GES_ENS, memchar, 'atmos', + f"{self.task_config.CDUMP}.t{self.runtime_config.previous_cycle.hour:02d}z.atmf006.nc") + atminc_jedi = os.path.join(self.task_config.DATA, 'anl', memchar, f'atminc.{cdate_inc}z.nc4') + atminc_fv3 = os.path.join(self.task_config.COMOUT, memchar, 'atmos', + f"{self.task_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atminc.nc") + + # Execute incpy to create the UFS model atm increment file + # TODO: use MPMD or parallelize with mpi4py + # See https://github.com/NOAA-EMC/global-workflow/pull/1373#discussion_r1173060656 + cmd = Executable(incpy) + cmd.add_default_arg(atmges_fv3) + cmd.add_default_arg(atminc_jedi) + cmd.add_default_arg(atminc_fv3) + logger.debug(f"Executing {cmd}") + cmd(output='stdout', error='stderr') diff --git a/workflow/applications.py b/workflow/applications.py index fc6dbd6d05..ff4161bded 100644 --- a/workflow/applications.py +++ b/workflow/applications.py @@ -108,7 +108,7 @@ def __init__(self, conf: Configuration) -> None: self.do_fit2obs = _base.get('DO_FIT2OBS', True) self.do_metp = _base.get('DO_METP', False) self.do_jediatmvar = _base.get('DO_JEDIVAR', False) - self.do_jediens = _base.get('DO_JEDIENS', False) + self.do_jediatmens = _base.get('DO_JEDIENS', False) self.do_jediocnvar = _base.get('DO_JEDIOCNVAR', False) self.do_mergensst = _base.get('DO_MERGENSST', False) @@ -194,8 +194,8 @@ def _cycled_configs(self): configs += ['gldas'] if self.do_hybvar: - if self.do_jediens: - configs += ['atmensanalprep', 'atmensanalrun', 'atmensanalpost'] + if self.do_jediatmens: + configs += ['atmensanlinit', 'atmensanlrun', 'atmensanlfinal'] else: configs += ['eobs', 'eomg', 'ediag', 'eupd'] configs += ['ecen', 'esfc', 'efcs', 'echgres', 'epos', 'earc'] @@ -377,8 +377,8 @@ def _get_cycled_task_names(self): hybrid_tasks = [] hybrid_after_eupd_tasks = [] if self.do_hybvar: - if self.do_jediens: - hybrid_tasks += ['atmensanalprep', 'atmensanalrun', 'atmensanalpost', 'echgres'] + if self.do_jediatmens: + hybrid_tasks += ['atmensanlinit', 'atmensanlrun', 'atmensanlfinal', 'echgres'] else: hybrid_tasks += ['eobs', 'eupd', 'echgres'] hybrid_tasks += ['ediag'] if self.lobsdiag_forenkf else ['eomg'] diff --git a/workflow/rocoto/workflow_tasks.py b/workflow/rocoto/workflow_tasks.py index 3142605363..ab915c1f2a 100644 --- a/workflow/rocoto/workflow_tasks.py +++ b/workflow/rocoto/workflow_tasks.py @@ -16,7 +16,7 @@ class Tasks: 'ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalchkpt', 'ocnanalpost', 'earc', 'ecen', 'echgres', 'ediag', 'efcs', 'eobs', 'eomg', 'epos', 'esfc', 'eupd', - 'atmensanalprep', 'atmensanalrun', 'atmensanalpost', + 'atmensanlinit', 'atmensanlrun', 'atmensanlfinal', 'aeroanlinit', 'aeroanlrun', 'aeroanlfinal', 'fcst', 
'post', 'ocnpost', 'vrfy', 'metp', 'postsnd', 'awips', 'gempak', @@ -397,6 +397,7 @@ def analdiag(self): return task def atmanlinit(self): + deps = [] dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} deps.append(rocoto.add_dependency(dep_dict)) @@ -407,7 +408,13 @@ def atmanlinit(self): else: dependencies = rocoto.create_dependency(dep=deps) - cycledef = "gdas" + gfs_cyc = self._base["gfs_cyc"] + gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False + + cycledef = self.cdump + if self.cdump in ['gfs'] and gfs_enkf and gfs_cyc != 4: + cycledef = 'gdas' + resources = self.get_resource('atmanlinit') task = create_wf_task('atmanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, cycledef=cycledef) @@ -1129,57 +1136,44 @@ def eupd(self): return task - def atmensanalprep(self): - - dump_suffix = self._base["DUMP_SUFFIX"] - gfs_cyc = self._base["gfs_cyc"] - dmpdir = self._base["DMPDIR"] - do_gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False - + def atmensanlinit(self): deps = [] - dep_dict = {'type': 'metatask', 'name': 'gdaspost', 'offset': '-06:00:00'} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009.nc' - dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'} + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}prep'} deps.append(rocoto.add_dependency(dep_dict)) - data = f'{dmpdir}/{self.cdump}{dump_suffix}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.updated.status.tm00.bufr_d' - dep_dict = {'type': 'data', 'data': data} + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': '-06:00:00'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - cycledef = self.cdump - if self.cdump in ['gfs'] and do_gfs_enkf and gfs_cyc != 4: - cycledef = 'gdas' - - resources = self.get_resource('atmensanalprep') - task = create_wf_task('atmensanalprep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef = "gdas" + resources = self.get_resource('atmensanlinit') + task = create_wf_task('atmensanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, cycledef=cycledef) return task - def atmensanalrun(self): + def atmensanlrun(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanalprep'} + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlinit'} deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': '-06:00:00'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - resources = self.get_resource('atmensanalrun') - task = create_wf_task('atmensanalrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + resources = self.get_resource('atmensanlrun') + task = create_wf_task('atmensanlrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) return task - def atmensanalpost(self): + def atmensanlfinal(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanalrun'} + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlrun'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) - resources = self.get_resource('atmensanalpost') - task = create_wf_task('atmensanalpost', resources, cdump=self.cdump, envar=self.envars, 
dependency=dependencies)
+        resources = self.get_resource('atmensanlfinal')
+        task = create_wf_task('atmensanlfinal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies)
 
         return task
 
@@ -1211,8 +1205,8 @@ def _get_ecengroups():
         deps = []
         dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}analcalc'}
         deps.append(rocoto.add_dependency(dep_dict))
-        if self.app_config.do_jediens:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanalrun'}
+        if self.app_config.do_jediatmens:
+            dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlfinal'}
         else:
             dep_dict = {'type': 'task', 'name': f'{self.cdump}eupd'}
         deps.append(rocoto.add_dependency(dep_dict))
@@ -1240,8 +1234,8 @@ def esfc(self):
         deps = []
         dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}analcalc'}
         deps.append(rocoto.add_dependency(dep_dict))
-        if self.app_config.do_jediens:
-            dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanalrun'}
+        if self.app_config.do_jediatmens:
+            dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlfinal'}
         else:
            dep_dict = {'type': 'task', 'name': f'{self.cdump}eupd'}
         deps.append(rocoto.add_dependency(dep_dict))

From f159d39a3b28dfcc120cdcdf87d11a611c75061f Mon Sep 17 00:00:00 2001
From: TerrenceMcGuinness-NOAA
Date: Fri, 21 Apr 2023 15:46:35 -0400
Subject: [PATCH 5/5] Add CI cron jobs (#1476)

As a maintainer of the CI framework, I need a set of cron jobs that fully
automates the CI pipeline: whenever the appropriate label is applied to a PR
on GitHub, the PR is cloned and built, and a set of functional experiments is
then executed and reported on.
---
 ci/cases/C96C48_hybatmDA.yaml | 15 +++
 ci/cases/C96_atm3DVar.yaml | 14 ++
 ci/experiments/C96C48_hybatmDA.yaml | 19 ---
 ci/experiments/C96C48_hybatmDA_also.yaml | 19 ---
 ci/{environments => platforms}/hera.sh | 3 -
 ci/{environments => platforms}/orion.sh | 4 +-
 ci/scripts/check_ci.sh | 115 +++++++++++++++++
 ci/scripts/clone-build_ci.sh | 89 +++++++------
 ci/scripts/create_experiment.py | 38 ++----
 ci/scripts/driver.sh | 121 +++++++-----------
 ci/scripts/pygw | 1 +
 ci/scripts/run_ci.sh | 71 ++++++++++
 modulefiles/module_gwci.hera.lua | 15 +++
 modulefiles/module_gwci.orion.lua | 21 +++
 ...setup.hera.lua => module_gwsetup.hera.lua} | 3 +-
 modulefiles/module_gwsetup.orion.lua | 17 +++
 test/diff_grib_files.py | 1 +
 17 files changed, 387 insertions(+), 179 deletions(-)
 create mode 100644 ci/cases/C96C48_hybatmDA.yaml
 create mode 100644 ci/cases/C96_atm3DVar.yaml
 delete mode 100644 ci/experiments/C96C48_hybatmDA.yaml
 delete mode 100644 ci/experiments/C96C48_hybatmDA_also.yaml
 rename ci/{environments => platforms}/hera.sh (57%)
 rename ci/{environments => platforms}/orion.sh (65%)
 create mode 100755 ci/scripts/check_ci.sh
 create mode 120000 ci/scripts/pygw
 create mode 100755 ci/scripts/run_ci.sh
 create mode 100644 modulefiles/module_gwci.hera.lua
 create mode 100644 modulefiles/module_gwci.orion.lua
 rename modulefiles/{module_setup.hera.lua => module_gwsetup.hera.lua} (99%)
 create mode 100644 modulefiles/module_gwsetup.orion.lua

diff --git a/ci/cases/C96C48_hybatmDA.yaml b/ci/cases/C96C48_hybatmDA.yaml
new file mode 100644
index 0000000000..9efce40900
--- /dev/null
+++ b/ci/cases/C96C48_hybatmDA.yaml
@@ -0,0 +1,15 @@
+experiment:
+  mode: cycled
+
+arguments:
+  app: ATM
+  resdet: 96
+  resens: 48
+  comrot: ${RUNTESTS}/${pslot}/COMROT
+  expdir: ${RUNTESTS}/${pslot}/EXPDIR
+  icsdir: ${ICSDIR_ROOT}/C96C48
+  idate: 2021122018
+  edate: 2021122200
+  nens: 2
+  gfs_cyc: 1
+  start: cold
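For reference, a case file like C96C48_hybatmDA.yaml above is consumed by ci/scripts/create_experiment.py (later in this patch), which flattens the arguments block into flags for setup_expt.py and derives pslot from the YAML basename. A hypothetical rendering of the resulting call, assuming RUNTESTS and ICSDIR_ROOT are already exported and noting that the exact handling of the mode argument may differ, might look like:

    # sketch only: pslot comes from the case-file name; all other flags
    # come from the arguments block of the YAML above
    ${HOMEgfs}/workflow/setup_expt.py cycled \
        --app ATM --resdet 96 --resens 48 \
        --comrot "${RUNTESTS}/C96C48_hybatmDA/COMROT" \
        --expdir "${RUNTESTS}/C96C48_hybatmDA/EXPDIR" \
        --icsdir "${ICSDIR_ROOT}/C96C48" \
        --idate 2021122018 --edate 2021122200 \
        --nens 2 --gfs_cyc 1 --start cold \
        --pslot C96C48_hybatmDA

diff --git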
a/ci/cases/C96_atm3DVar.yaml b/ci/cases/C96_atm3DVar.yaml new file mode 100644 index 0000000000..1648432e09 --- /dev/null +++ b/ci/cases/C96_atm3DVar.yaml @@ -0,0 +1,14 @@ +experiment: + mode: cycled + +arguments: + app: ATM + resdet: 96 + comrot: ${RUNTESTS}/${pslot}/COMROT + expdir: ${RUNTESTS}/${pslot}/EXPDIR + icsdir: ${ICSDIR_ROOT}/C96C48 + idate: 2021122018 + edate: 2021122100 + nens: 0 + gfs_cyc: 1 + start: cold diff --git a/ci/experiments/C96C48_hybatmDA.yaml b/ci/experiments/C96C48_hybatmDA.yaml deleted file mode 100644 index cf291e5081..0000000000 --- a/ci/experiments/C96C48_hybatmDA.yaml +++ /dev/null @@ -1,19 +0,0 @@ -environment: - HOMEgfs: ${HOMEGFS} # TODO - using the env var ${HOMEgfs} will cause - # the rocoto XML file to have HOMEgfs set to None -experiment: - mode: cycled - -arguments: - pslot: ${pslot} # TODO - same bug as above with HOMEgfs - app: ATM - resdet: 96 - resens: 48 - comrot: ${RUNTEST} - expdir: ${RUNTEST}/expdir - icsdir: ${ICSDIR_ROOT}/C96C48 - idate: 2021122018 - edate: 2021122200 - nens: 2 - gfs_cyc: 1 - start: cold diff --git a/ci/experiments/C96C48_hybatmDA_also.yaml b/ci/experiments/C96C48_hybatmDA_also.yaml deleted file mode 100644 index cf291e5081..0000000000 --- a/ci/experiments/C96C48_hybatmDA_also.yaml +++ /dev/null @@ -1,19 +0,0 @@ -environment: - HOMEgfs: ${HOMEGFS} # TODO - using the env var ${HOMEgfs} will cause - # the rocoto XML file to have HOMEgfs set to None -experiment: - mode: cycled - -arguments: - pslot: ${pslot} # TODO - same bug as above with HOMEgfs - app: ATM - resdet: 96 - resens: 48 - comrot: ${RUNTEST} - expdir: ${RUNTEST}/expdir - icsdir: ${ICSDIR_ROOT}/C96C48 - idate: 2021122018 - edate: 2021122200 - nens: 2 - gfs_cyc: 1 - start: cold diff --git a/ci/environments/hera.sh b/ci/platforms/hera.sh similarity index 57% rename from ci/environments/hera.sh rename to ci/platforms/hera.sh index 843b8b103b..35fe7bca91 100644 --- a/ci/environments/hera.sh +++ b/ci/platforms/hera.sh @@ -1,10 +1,7 @@ #!/usr/bin/bash export GFS_CI_ROOT=/scratch1/NCEPDEV/global/Terry.McGuinness/GFS_CI_ROOT -export GFS_MODULE_USE="${GFS_CI_ROOT}/global-workflow/modulefiles" export SLURM_ACCOUNT=fv3-cpu export SALLOC_ACCOUNT="${SLURM_ACCOUNT}" export SBATCH_ACCOUNT="${SLURM_ACCOUNT}" export SLURM_QOS=debug -export repo_url="https://github.com/NOAA-EMC/global-workflow.git" -#export repo_url="https://github.com/TerrenceMcGuinness-NOAA/global-workflow.git" export ICSDIR_ROOT="/scratch1/NCEPDEV/global/glopara/data/ICSDIR" diff --git a/ci/environments/orion.sh b/ci/platforms/orion.sh similarity index 65% rename from ci/environments/orion.sh rename to ci/platforms/orion.sh index 901d9488e9..7d69a3b276 100644 --- a/ci/environments/orion.sh +++ b/ci/platforms/orion.sh @@ -1,7 +1,7 @@ #!/usr/bin/bash -export GFS_CI_ROOT="TDB" #TODO -export GFS_MODULE_USE="${GFS_CI_ROOT}/global-workflow/modulefiles" +export GFS_CI_ROOT=/work2/noaa/global/mterry/GFS_CI_ROOT +export ICSDIR_ROOT=/work/noaa/global/glopara/data/ICSDIR export SLURM_ACCOUNT=fv3-cpu export SALLOC_ACCOUNT=${SLURM_ACCOUNT} export SBATCH_ACCOUNT=${SLURM_ACCOUNT} diff --git a/ci/scripts/check_ci.sh b/ci/scripts/check_ci.sh new file mode 100755 index 0000000000..aa48e9f894 --- /dev/null +++ b/ci/scripts/check_ci.sh @@ -0,0 +1,115 @@ +#!/bin/bash +set -eux +##################################################################################### +# +# Script description: BASH script for checking for cases in a given PR and +# running rocotostat on each to determine if the experiment has +# succeeded or 
failed. This script is intended
+#                    to run from within a cron job in the CI Managers account
+# Abstract TODO
+#####################################################################################
+
+HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )"
+scriptname=$(basename "${BASH_SOURCE[0]}")
+echo "Begin ${scriptname} at $(date -u)" || true
+export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]'
+
+GH=${HOME}/bin/gh
+REPO_URL=${REPO_URL:-"https://github.com/NOAA-EMC/global-workflow.git"}
+
+#########################################################################
+# Set up runtime environment variables for accounts on supported machines
+#########################################################################
+
+source "${HOMEgfs}/ush/detect_machine.sh"
+case ${MACHINE_ID} in
+  hera | orion)
+    echo "Running Automated Testing on ${MACHINE_ID}"
+    source "${HOMEgfs}/ci/platforms/${MACHINE_ID}.sh"
+    ;;
+  *)
+    echo "Unsupported platform. Exiting with error."
+    exit 1
+    ;;
+esac
+set +x
+source "${HOMEgfs}/ush/module-setup.sh"
+module use "${HOMEgfs}/modulefiles"
+module load "module_gwsetup.${MACHINE_ID}"
+module list
+set -x
+rocotostat=$(which rocotostat)
+if [[ -z ${rocotostat} ]]; then
+  echo "rocotostat not found on system"
+  exit 1
+else
+  echo "rocotostat being used from ${rocotostat}"
+fi
+
+pr_list_file="open_pr_list"
+
+if [[ -s "${GFS_CI_ROOT}/${pr_list_file}" ]]; then
+  pr_list=$(cat "${GFS_CI_ROOT}/${pr_list_file}")
+else
+  echo "no PRs to process .. exit"
+  exit 0
+fi
+
+#############################################################
+# Loop through all PRs in the PR list, look for experiments
+# in the RUNTESTS dir, and run rocotostat on each one
+#############################################################
+
+for pr in ${pr_list}; do
+  id=$("${GH}" pr view "${pr}" --repo "${REPO_URL}" --json id --jq '.id')
+  echo "Processing Pull Request #${pr} and looking for cases"
+  pr_dir="${GFS_CI_ROOT}/PR/${pr}"
+
+  # If there is no RUNTESTS dir for this PR then cases have not been made yet
+  if [[ !
-d "${pr_dir}/RUNTESTS" ]]; then + continue + fi + num_cases=$(find "${pr_dir}/RUNTESTS" -mindepth 1 -maxdepth 1 -type d | wc -l) || true + + #Check for PR success when ${pr_dir}/RUNTESTS is void of subfolders + # since all successfull ones where previously removed + if [[ "${num_cases}" -eq 0 ]] && [[ -d "${pr_dir}/RUNTESTS" ]]; then + "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Running" --add-label "CI-${MACHINE_ID^}-Passed" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + sed -i "/${pr}/d" "${GFS_CI_ROOT}/${pr_list_file}" + # Completely remove the PR and its cloned repo on sucess of all cases + rm -Rf "${pr_dir}" + continue + fi + + for cases in "${pr_dir}/RUNTESTS/"*; do + pslot=$(basename "${cases}") + xml="${pr_dir}/RUNTESTS/${pslot}/EXPDIR/${pslot}/${pslot}.xml" + db="${pr_dir}/RUNTESTS/${pslot}/EXPDIR/${pslot}/${pslot}.db" + rocoto_stat_output=$("${rocotostat}" -w "${xml}" -d "${db}" -s | grep -v CYCLE) || true + num_cycles=$(echo "${rocoto_stat_output}" | wc -l) || true + num_done=$(echo "${rocoto_stat_output}" | grep -c Done) || true + num_succeeded=$("${rocotostat}" -w "${xml}" -d "${db}" -a | grep -c SUCCEEDED) || true + echo "${pslot} Total Cycles: ${num_cycles} number done: ${num_done}" || true + num_failed=$("${rocotostat}" -w "${xml}" -d "${db}" -a | grep -c -E 'FAIL|DEAD') || true + if [[ ${num_failed} -ne 0 ]]; then + { + echo "Experiment ${pslot} Terminated: *FAILED*" + echo "Experiment ${pslot} Terminated with ${num_failed} tasks failed at $(date)" || true + } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Running" --add-label "CI-${MACHINE_ID^}-Failed" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + sed -i "/${pr}/d" "${GFS_CI_ROOT}/${pr_list_file}" + fi + if [[ "${num_done}" -eq "${num_cycles}" ]]; then + { + echo "Experiment ${pslot} completed: *SUCCESS*" + echo "Experiment ${pslot} Completed at $(date)" || true + echo -n "with ${num_succeeded} successfully completed jobs" || true + } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + #Remove Experment cases that completed successfully + rm -Rf "${pr_dir}/RUNTESTS/${pslot}" + fi + done +done diff --git a/ci/scripts/clone-build_ci.sh b/ci/scripts/clone-build_ci.sh index e6b5eb53ef..022cc44378 100755 --- a/ci/scripts/clone-build_ci.sh +++ b/ci/scripts/clone-build_ci.sh @@ -1,11 +1,6 @@ #!/bin/bash set -eux -################################################################# -# TODO using static build for GitHub CLI until fixed in HPC-Stack -################################################################# -GH=/home/Terry.McGuinness/bin/gh -repo_url=${repo_url:-"https://github.com/global-workflow.git"} ##################################################################### # Usage and arguments for specfifying cloned directgory ##################################################################### @@ -45,25 +40,14 @@ while getopts "p:d:o:h" opt; do esac done -#################################################################### -# start output file -{ - echo "Automated global-workflow Testing Results:" - echo "Machine: ${CI_HOST}" - echo '```' - echo "Start: $(date) on $(hostname)" || true - echo "---------------------------------------------------" -} >> "${outfile}" 
-######################################################################
-
-cd "${repodir}"
+cd "${repodir}" || exit 1
 # clone copy of repo
 if [[ -d global-workflow ]]; then
   rm -Rf global-workflow
 fi
 
-git clone "${repo_url}"
-cd global-workflow
+git clone "${REPO_URL}"
+cd global-workflow || exit 1
 
 pr_state=$(gh pr view "${PR}" --json state --jq '.state')
 if [[ "${pr_state}" != "OPEN" ]]; then
@@ -73,34 +57,63 @@ if [[ "${pr_state}" != "OPEN" ]]; then
 fi
 
 # checkout pull request
-"${GH}" pr checkout "${PR}" --repo "${repo_url}"
+"${GH}" pr checkout "${PR}" --repo "${REPO_URL}"
+HOMEgfs="${PWD}"
+source "${HOMEgfs}/ush/detect_machine.sh"
+
+####################################################################
+# start output file
+{
+  echo "Automated global-workflow Testing Results:"
+  echo '```'
+  echo "Machine: ${MACHINE_ID^}"
+  echo "Start: $(date) on $(hostname)" || true
+  echo "---------------------------------------------------"
+} >> "${outfile}"
+######################################################################
 
 # get commit hash
 commit=$(git log --pretty=format:'%h' -n 1)
 echo "${commit}" > "../commit"
 
-# run build script
-cd sorc
+# run checkout script
+cd sorc || exit 1
+set +e
+./checkout.sh -c -g -u &>> log.checkout
+checkout_status=$?
+if [[ ${checkout_status} != 0 ]]; then
+  {
+    echo "Checkout: *FAILED*"
+    echo "Checkout: Failed at $(date)" || true
+    echo "Checkout: see output at ${PWD}/log.checkout"
+  } >> "${outfile}"
+  exit "${checkout_status}"
+else
+  {
+    echo "Checkout: *SUCCESS*"
+    echo "Checkout: Completed at $(date)" || true
+  } >> "${outfile}"
+fi
+
+# build full cycle
+source "${HOMEgfs}/ush/module-setup.sh"
 export BUILD_JOBS=8
 rm -rf log.build
-./checkout.sh -g -c
-# build full cycle
-./build_all.sh -g &>> log.build
-
-# Validations
+./build_all.sh &>> log.build
 build_status=$?
-if [[ ${build_status} -eq 0 ]]; then
-{
-  echo "Build: *SUCCESS*"
-  echo "Build: Completed at $(date)" || true
-} >> "${outfile}"
+
+if [[ ${build_status} != 0 ]]; then
+  {
+    echo "Build: *FAILED*"
+    echo "Build: Failed at $(date)" || true
+    echo "Build: see output at ${PWD}/log.build"
+  } >> "${outfile}"
+  exit "${build_status}"
 else
-{
-  echo "Build: *FAILED*"
-  echo "Build: Failed at $(date)" || true
-  echo "Build: see output at ${PWD}/log.build"
-}
-  echo '```' >> "${outfile}"
+  {
+    echo "Build: *SUCCESS*"
+    echo "Build: Completed at $(date)" || true
+  } >> "${outfile}"
 fi
 
 ./link_workflow.sh
diff --git a/ci/scripts/create_experiment.py b/ci/scripts/create_experiment.py
index 6b946f3a4a..ce95714d48 100755
--- a/ci/scripts/create_experiment.py
+++ b/ci/scripts/create_experiment.py
@@ -10,14 +10,14 @@
 ${HOMEgfs}/workflow/setup_expt.py
 ${HOMEgfs}/workflow/setup_xml.py
 
-The yaml file are simply the argments for these two scripts.
+The YAML file simply provides the arguments for these two scripts.
 After this script runs these two, the user will have an experiment ready for launching
 
 Output
 ------
 
-Functionally an experement is setup as a result running the two scripts discribed above
-with an error code of 0 apon success.
+Functionally an experiment is set up as a result of running the two scripts described above,
+with an error code of 0 upon success.
""" import sys @@ -30,8 +30,6 @@ from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter -from workflow.hosts import Host - logger = Logger(level='DEBUG', colored_log=True) @@ -47,12 +45,7 @@ def input_args(): Description ----------- - A full path to a YAML file with the following format with required sections: environment, experiment, arguments - - environment: - HOMEgfs: ${HOMEGFS} - used to pass the environment variable $HOMEGFS - of the path to the global-workflow repo being tested + A full path to a YAML file with the following format with required sections: experiment, arguments experiment: mode: @@ -60,7 +53,7 @@ def input_args(): arguments: holds all the remaining key values pairs for all requisite arguments documented for setup_expt.py - + Note: the argument pslot is derived from the basename of the yamlfile itself Returns ------- @@ -68,10 +61,10 @@ def input_args(): args: Namespace Namespace with the value of the file path to a yaml file from the key yaml - +:w """ - description = """Single agument as a yaml file containing the + description = """Single argument as a yaml file containing the key value pairs as arguments to setup_expt.py """ @@ -79,6 +72,7 @@ def input_args(): formatter_class=ArgumentDefaultsHelpFormatter) parser.add_argument('--yaml', help='yaml configuration file per experiment', type=str, required=True) + parser.add_argument('--dir', help='full path to top level of repo of global-workflow', type=str, required=True) args = parser.parse_args() return args @@ -87,17 +81,10 @@ def input_args(): if __name__ == '__main__': user_inputs = input_args() - - try: - host = Host() - logger.info(f'Running on HOST:{host.machine}') - except NotImplementedError: - logger.error(f'HOST:{socket.gethostname()} is not currently supported') - sys.exit(1) - setup_expt_args = YAMLFile(path=user_inputs.yaml) - HOMEgfs = setup_expt_args.environment.HOMEgfs + HOMEgfs = user_inputs.dir + pslot = Path(user_inputs.yaml).stem mode = setup_expt_args.experiment.mode setup_expt_cmd = Executable(Path.absolute(Path.joinpath(Path(HOMEgfs), 'workflow', 'setup_expt.py'))) @@ -107,11 +94,14 @@ def input_args(): setup_expt_cmd.add_default_arg(f'--{conf}') setup_expt_cmd.add_default_arg(str(value)) + setup_expt_cmd.add_default_arg('--pslot') + setup_expt_cmd.add_default_arg(pslot) + logger.info(f'Run command: {setup_expt_cmd.command}') setup_expt_cmd(output='stdout_expt', error='stderr_expt') setup_xml_cmd = Executable(Path.absolute(Path.joinpath(Path(HOMEgfs), 'workflow', 'setup_xml.py'))) - expdir = Path.absolute(Path.joinpath(Path(setup_expt_args.arguments.expdir), Path(setup_expt_args.arguments.pslot))) + expdir = Path.absolute(Path.joinpath(Path(setup_expt_args.arguments.expdir), Path(pslot))) setup_xml_cmd.add_default_arg(str(expdir)) logger.info(f'Run command: {setup_xml_cmd.command}') diff --git a/ci/scripts/driver.sh b/ci/scripts/driver.sh index 60634c3352..0bd90db36c 100755 --- a/ci/scripts/driver.sh +++ b/ci/scripts/driver.sh @@ -1,5 +1,6 @@ -#!/bin/bash --login -# +#!/bin/bash +set -eux + ##################################################################################### # # Script description: Top level driver script for checking PR @@ -7,7 +8,7 @@ # # Abstract: # -# This script uses GitHub CLI to check for Pull Requests with {machine}-CI tags on the +# This script uses GitHub CLI to check for Pull Requests with CI-Ready-${machine} tags on the # development branch for the global-workflow repo. 
It then stages test directories per
 # PR number and calls clone-build_ci.sh to perform a clone and full build from $(HOMEgfs)/sorc
 # of the PR. It is then ready to run a suite of regression tests with various
@@ -17,50 +18,26 @@
 #################################################################
 # TODO using static build for GitHub CLI until fixed in HPC-Stack
 #################################################################
-GH=/home/Terry.McGuinness/bin/gh
-repo_url=${repo_url:-"https://github.com/NOAA-EMC/global-workflow.git"}
+export GH=${HOME}/bin/gh
+export REPO_URL=${REPO_URL:-"https://github.com/NOAA-EMC/global-workflow.git"}
 
 ################################################################
 # Setup the relative paths to scripts and PS4 for better logging
 ################################################################
-WF_ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )"
+HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )"
 scriptname=$(basename "${BASH_SOURCE[0]}")
 echo "Begin ${scriptname} at $(date -u)" || true
 export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]'
-
-usage() {
-  set +x
-  echo
-  echo "Usage: $0 -h"
-  echo
-  echo "  -h  display this message and quit"
-  echo
-  echo "This is top level script to run CI tests on the global-workflow repo"
-  if [[ -n "${TARGET+x}" ]]; then
-    echo "on the DEFAULT: ${TARGET} machine"
-  fi
-  echo
-  exit 0
-}
-
-
 #########################################################################
 # Set up runtime environment variables for accounts on supported machines
 #########################################################################
 
-source "${WF_ROOT_DIR}/ush/detect_machine.sh"
-if [[ "${MACHINE_ID}" != "UNKNOWN" ]]; then
-  TARGET="${MACHINE_ID}"
-else
-  echo "Unsupported platform. Exiting with error."
-  exit 1
-fi
-
-case ${TARGET} in
+source "${HOMEgfs}/ush/detect_machine.sh"
+case ${MACHINE_ID} in
   hera | orion)
-    echo "Running Automated Testing on ${TARGET}"
-    source "${WF_ROOT_DIR}/ci/environments/${TARGET}.sh"
+    echo "Running Automated Testing on ${MACHINE_ID}"
+    source "${HOMEgfs}/ci/platforms/${MACHINE_ID}.sh"
    ;;
   *)
     echo "Unsupported platform. Exiting with error."
@@ -68,15 +45,22 @@ case ${TARGET} in
    ;;
 esac
 
+######################################################
+# setup runtime env for correct python install and git
+######################################################
+set +x
+source "${HOMEgfs}/ush/module-setup.sh"
+module use "${HOMEgfs}/modulefiles"
+module load "module_gwsetup.${MACHINE_ID}"
+set -x
+
 ############################################################
 # query repo and get list of open PRs with CI-Ready-${machine} labels
 ############################################################
-set -eux
-export CI_HOST="${TARGET^}"
 pr_list_file="open_pr_list"
-rm -f "${pr_list_file}"
-list=$(${GH} pr list --repo "${repo_url}" --label "${CI_HOST}-CI" --state "open")
-list=$(echo "${list}" | awk '{print $1;}' > "${GFS_CI_ROOT}/${pr_list_file}")
+touch "${GFS_CI_ROOT}/${pr_list_file}"
+list=$(${GH} pr list --repo "${REPO_URL}" --label "CI-${MACHINE_ID^}-Ready" --state "open")
+list=$(echo "${list}" | awk '{print $1;}' >> "${GFS_CI_ROOT}/${pr_list_file}")
 
 if [[ -s "${GFS_CI_ROOT}/${pr_list_file}" ]]; then
   pr_list=$(cat "${GFS_CI_ROOT}/${pr_list_file}")
@@ -84,71 +68,64 @@
 else
   echo "no PRs to process ..
exit" exit 0 fi - + ############################################################# # Loop throu all open PRs -# Clone, checkout, build, creat set of experiments, for each +# Clone, checkout, build, creat set of cases, for each ############################################################# -cd "${GFS_CI_ROOT}" for pr in ${pr_list}; do - "${GH}" pr edit --repo "${repo_url}" "${pr}" --remove-label "${CI_HOST}-CI" --add-label "${CI_HOST}-Running" + + "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Ready" --add-label "CI-${MACHINE_ID^}-Building" echo "Processing Pull Request #${pr}" pr_dir="${GFS_CI_ROOT}/PR/${pr}" mkdir -p "${pr_dir}" # call clone-build_ci to clone and build PR - id=$("${GH}" pr view "${pr}" --repo "${repo_url}" --json id --jq '.id') - "${WF_ROOT_DIR}/ci/scripts/clone-build_ci.sh" -p "${pr}" -d "${pr_dir}" -o "${pr_dir}/output_${id}" + id=$("${GH}" pr view "${pr}" --repo "${REPO_URL}" --json id --jq '.id') + set +e + "${HOMEgfs}/ci/scripts/clone-build_ci.sh" -p "${pr}" -d "${pr_dir}" -o "${pr_dir}/output_${id}" ci_status=$? + set -e if [[ ${ci_status} -eq 0 ]]; then - #setup runtime env for correct python install - export HOMEGFS="${pr_dir}/global-workflow" - module use "${HOMEGFS}/modulefiles" - module load "module_setup.${TARGET}" - module list #setup space to put an experiment - export RUNTEST="${pr_dir}/RUNTEST" - rm -Rf "${RUNTEST:?}"/* - mkdir -p "${RUNTEST}" - #make links to the python packages used in the PR'ed repo - cd "${WF_ROOT_DIR}/ci/scripts" - if [[ ! -L workflow ]]; then - ln -s "${HOMEGFS}/workflow" workflow - fi - if [[ ! -L pygw ]]; then - ln -s "${HOMEGFS}/ush/python/pygw/src/pygw" pygw - fi + # export RUNTESTS for yaml case files to pickup + export RUNTESTS="${pr_dir}/RUNTESTS" + rm -Rf "${pr_dir:?}/RUNTESTS/"* + ############################################################# - # loop over every yaml file in ${WF_ROOT_DIR}/ci/experiments + # loop over every yaml file in ${HOMEgfs}/ci/cases # and create an run directory for each one for this PR loop ############################################################# - for yaml_config in "${WF_ROOT_DIR}/ci/experiments/"*.yaml; do + for yaml_config in "${HOMEgfs}/ci/cases/"*.yaml; do pslot=$(basename "${yaml_config}" .yaml) || true export pslot - "${WF_ROOT_DIR}/ci/scripts/create_experiment.py" --yaml "${WF_ROOT_DIR}/ci/experiments/${pslot}.yaml" + set +e + "${HOMEgfs}/ci/scripts/create_experiment.py" --yaml "${HOMEgfs}/ci/cases/${pslot}.yaml" --dir "${pr_dir}/global-workflow" ci_status=$? 
+     set -e
     if [[ ${ci_status} -eq 0 ]]; then
       {
-        echo "Created experiment"
-        echo "Experiment setup: Completed at $(date) for expirment ${pslot}" || true
+        echo "Created experiment: *SUCCESS*"
+        echo "Case setup: Completed at $(date) for experiment ${pslot}" || true
       } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}"
+      "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Running"
     else
       {
-        echo "Failed on createing experiment ${pslot}"
+        echo "Failed to create experiment ${pslot}: *FAIL*"
         echo "Experiment setup: failed at $(date) for experiment ${pslot}" || true
       } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}"
-      "${GH}" pr edit "${pr}" --repo "${repo_url}" --remove-label "${CI_HOST}-Running" --add-label "${CI_HOST}-Failed"
+      "${GH}" pr edit "${pr}" --repo "${REPO_URL}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Failed"
     fi
    done
-   "${GH}" pr comment "${pr}" --repo "${repo_url}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}"
-   "${GH}" pr edit --repo "${repo_url}" "${pr}" --remove-label "${CI_HOST}-Running" --add-label "${CI_HOST}-Passed"
+
   else
    {
      echo "Failed on cloning and building global-workflow PR: ${pr}"
-     echo "CI on ${CI_HOST} failed to build on $(date) for repo ${repo_url}}" || true
+     echo "CI on ${MACHINE_ID^} failed to build on $(date) for repo ${REPO_URL}" || true
   } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}"
-    "${GH}" pr edit "${pr}" --repo "${repo_url}" --remove-label "${CI_HOST}-Running" --add-label "${CI_HOST}-Failed"
+    "${GH}" pr edit "${pr}" --repo "${REPO_URL}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Failed"
   fi
+  "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}"
 done # looping over each open and labeled PR
diff --git a/ci/scripts/pygw b/ci/scripts/pygw
new file mode 120000
index 0000000000..77d784f6ca
--- /dev/null
+++ b/ci/scripts/pygw
@@ -0,0 +1 @@
+../../ush/python/pygw/src/pygw
\ No newline at end of file
diff --git a/ci/scripts/run_ci.sh b/ci/scripts/run_ci.sh
new file mode 100755
index 0000000000..c79ea06e77
--- /dev/null
+++ b/ci/scripts/run_ci.sh
@@ -0,0 +1,71 @@
+#!/bin/bash
+set -eux
+
+#####################################################################################
+#
+# Script description: BASH script for checking for cases in a given PR and
+#                     simply running rocotorun on each. This script is intended
+#                     to run from within a cron job in the CI Managers account
+# Abstract TODO
+#####################################################################################
+
+HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )"
+scriptname=$(basename "${BASH_SOURCE[0]}")
+echo "Begin ${scriptname} at $(date -u)" || true
+export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]'
+
+#########################################################################
+# Set up runtime environment variables for accounts on supported machines
+#########################################################################
+
+source "${HOMEgfs}/ush/detect_machine.sh"
+case ${MACHINE_ID} in
+  hera | orion)
+    echo "Running Automated Testing on ${MACHINE_ID}"
+    source "${HOMEgfs}/ci/platforms/${MACHINE_ID}.sh"
+    ;;
+  *)
+    echo "Unsupported platform. Exiting with error."
+    exit 1
+    ;;
+esac
+set +x
+source "${HOMEgfs}/ush/module-setup.sh"
+module use "${HOMEgfs}/modulefiles"
+module load "module_gwsetup.${MACHINE_ID}"
+module list
+set -eux
+rocotorun=$(which rocotorun)
+if [[ -z ${rocotorun} ]]; then
+  echo "rocotorun not found on system"
+  exit 1
+else
+  echo "rocotorun being used from ${rocotorun}"
+fi
+
+pr_list_file="open_pr_list"
+
+if [[ -s "${GFS_CI_ROOT}/${pr_list_file}" ]]; then
+  pr_list=$(cat "${GFS_CI_ROOT}/${pr_list_file}")
+else
+  echo "no PRs to process .. exit"
+  exit 0
+fi
+
+#############################################################
+# Loop through all PRs in the PR list, look for experiments
+# in the RUNTESTS dir, and run rocotorun on each one
+#############################################################
+
+for pr in ${pr_list}; do
+  echo "Processing Pull Request #${pr} and looking for cases"
+  pr_dir="${GFS_CI_ROOT}/PR/${pr}"
+  for cases in "${pr_dir}/RUNTESTS/"*; do
+    pslot=$(basename "${cases}")
+    xml="${pr_dir}/RUNTESTS/${pslot}/EXPDIR/${pslot}/${pslot}.xml"
+    db="${pr_dir}/RUNTESTS/${pslot}/EXPDIR/${pslot}/${pslot}.db"
+    echo "Running: ${rocotorun} -v 10 -w ${xml} -d ${db}"
+    "${rocotorun}" -v 10 -w "${xml}" -d "${db}"
+  done
+done
+
diff --git a/modulefiles/module_gwci.hera.lua b/modulefiles/module_gwci.hera.lua
new file mode 100644
index 0000000000..f4b62a5fd2
--- /dev/null
+++ b/modulefiles/module_gwci.hera.lua
@@ -0,0 +1,15 @@
+help([[
+Load environment to run GFS workflow CI scripts on Hera
+]])
+
+prepend_path("MODULEPATH", "/scratch2/NCEPDEV/nwprod/hpc-stack/libs/hpc-stack/modulefiles/stack")
+
+load(pathJoin("hpc", "1.1.0"))
+load(pathJoin("hpc-intel", "18.0.5.274"))
+load(pathJoin("hpc-impi", "2018.0.4"))
+
+load(pathJoin("netcdf","4.7.4"))
+load(pathJoin("nccmp","1.8.7.0"))
+load(pathJoin("wgrib2", "2.0.8"))
+
+whatis("Description: GFS run setup CI environment")
diff --git a/modulefiles/module_gwci.orion.lua b/modulefiles/module_gwci.orion.lua
new file mode 100644
index 0000000000..779e80a454
--- /dev/null
+++ b/modulefiles/module_gwci.orion.lua
@@ -0,0 +1,21 @@
+help([[
+Load environment to run GFS workflow CI scripts on Orion
+]])
+
+prepend_path("MODULEPATH", "/apps/contrib/NCEP/hpc-stack/libs/hpc-stack/modulefiles/stack")
+
+load(pathJoin("hpc", "1.1.0"))
+load(pathJoin("hpc-intel", "2018.4"))
+load(pathJoin("hpc-impi", "2018.4"))
+load(pathJoin("netcdf","4.7.4"))
+load(pathJoin("nccmp","1.8.7.0"))
+load(pathJoin("contrib","0.1"))
+load(pathJoin("wgrib2","3.0.2"))
+
+prepend_path("MODULEPATH", "/work2/noaa/global/wkolczyn/save/hpc-stack/modulefiles/stack")
+load(pathJoin("hpc", "1.2.0"))
+load(pathJoin("hpc-intel", "2018.4"))
+load(pathJoin("hpc-miniconda3", "4.6.14"))
+load(pathJoin("gfs_workflow", "1.0.0"))
+
+whatis("Description: GFS run CI top-level scripts environment")
diff --git a/modulefiles/module_setup.hera.lua b/modulefiles/module_gwsetup.hera.lua
similarity index 99%
rename from modulefiles/module_setup.hera.lua
rename to modulefiles/module_gwsetup.hera.lua
index 4971a3f2d9..a07b32b6a6 100644
--- a/modulefiles/module_setup.hera.lua
+++ b/modulefiles/module_gwsetup.hera.lua
@@ -2,10 +2,9 @@ help([[
 Load environment to run GFS workflow setup scripts on Hera
 ]])
 
--- Temporary until official hpc-stack is updated
-
 load(pathJoin("rocoto"))
 
+-- Temporary until official hpc-stack is updated
 prepend_path("MODULEPATH", "/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/modulefiles/stack")
 load(pathJoin("hpc", "1.2.0"))
 load(pathJoin("hpc-miniconda3", "4.6.14"))
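The gwsetup modules above (and their gwci counterparts earlier in this patch) are what the CI scripts load via module load "module_gwsetup.${MACHINE_ID}". A hypothetical interactive session on Hera, assuming a checkout of this branch at the repo root, might exercise them like this:

    # sketch only: module names come from this patch; paths assume the repo root
    source ush/module-setup.sh
    module use "${PWD}/modulefiles"
    module load module_gwsetup.hera   # python, rocoto, git for the setup scripts
    module load module_gwci.hera      # nccmp and wgrib2 for comparing output

diff --git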
a/modulefiles/module_gwsetup.orion.lua b/modulefiles/module_gwsetup.orion.lua
new file mode 100644
index 0000000000..37f3187fb4
--- /dev/null
+++ b/modulefiles/module_gwsetup.orion.lua
@@ -0,0 +1,17 @@
+help([[
+Load environment to run GFS workflow setup scripts on Orion
+]])
+
+-- Temporary until official hpc-stack is updated
+
+prepend_path("MODULEPATH", "/apps/modulefiles/core")
+load(pathJoin("contrib","0.1"))
+load(pathJoin("rocoto","1.3.3"))
+load(pathJoin("git","2.28.0"))
+
+prepend_path("MODULEPATH", "/work2/noaa/global/wkolczyn/save/hpc-stack/modulefiles/stack")
+load(pathJoin("hpc", "1.2.0"))
+load(pathJoin("hpc-miniconda3", "4.6.14"))
+load(pathJoin("gfs_workflow", "1.0.0"))
+
+whatis("Description: GFS run CI top-level scripts environment")
diff --git a/test/diff_grib_files.py b/test/diff_grib_files.py
index e0eb7936db..9c01afbb18 100755
--- a/test/diff_grib_files.py
+++ b/test/diff_grib_files.py
@@ -72,4 +72,5 @@ def count_nonid_corr(test_string: str, quiet=False):
     wgrib2_cmd = f"wgrib2 {fileA} -var -rpn 'sto_1' -import_grib {fileB} -rpn 'rcl_1:print_corr'"
 
     string = subprocess.run(wgrib2_cmd, shell=True, stdout=subprocess.PIPE).stdout.decode("utf-8")
+
    count_nonid_corr(string)
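Taken together, driver.sh, run_ci.sh, and check_ci.sh are designed to be driven from cron in the CI manager's account, per the PATCH 5/5 description. A hypothetical crontab (the cadence, paths, and log locations are illustrative assumptions, not part of this patch) might look like:

    # sketch only: clone/build labeled PRs, advance experiments, then report status
    */30 * * * * /path/to/global-workflow/ci/scripts/driver.sh   >> ${HOME}/ci_driver.log 2>&1
    */5  * * * * /path/to/global-workflow/ci/scripts/run_ci.sh   >> ${HOME}/ci_run.log    2>&1
    */15 * * * * /path/to/global-workflow/ci/scripts/check_ci.sh >> ${HOME}/ci_check.log  2>&1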