From 254c42bfc1ec9751d8e23565ef6eccbec171ee50 Mon Sep 17 00:00:00 2001 From: Steve Goldhaber Date: Tue, 26 Oct 2021 20:43:08 -0600 Subject: [PATCH 1/2] Add new runtime info routine, ccpp_physics_suite_schemes Convert framework options to DDT with methods (runtime environment). Convert shebangs to python3 Change ccpp_error_flag to ccpp_error_code Add polymorphic variable property Remove optional variable property Introduce FortranVar to allow Fortran to have undocumented optional variables Cleanup ccpp_error_code usage Fix hang in datatable write plus catch more bad units Split var_props.py from metavar.py to allow independent development Added VarCompatObject to hold compatibility and transform information about a pair of Var objects. This object looks for compatible differences in units, kind, or dimensions. Created unit tests for variable compatibility. Move PrettyElementTree to xml_tools Refactor ccpp_suite to separate out SuiteObjects Created /test/run_tests.sh to run all capgen tests. 
--- .github/workflows/python.yaml | 1 + .travis.yml | 3 +- doc/HelloWorld/hello_scheme.meta | 12 +- doc/HelloWorld/hello_world_host.meta | 4 +- doc/HelloWorld/temp_adjust.meta | 12 +- scripts/ccpp_capgen.py | 274 +- scripts/ccpp_datafile.py | 117 +- scripts/ccpp_fortran_to_metadata.py | 15 +- scripts/ccpp_prebuild.py | 2 +- scripts/ccpp_suite.py | 2224 ++--------------- scripts/code_block.py | 2 +- scripts/common.py | 2 +- scripts/constituents.py | 228 +- scripts/conversion_tools/unit_conversion.py | 2 +- scripts/ddt_library.py | 71 +- scripts/file_utils.py | 4 +- scripts/fortran_tools/fortran_write.py | 2 +- scripts/fortran_tools/parse_fortran.py | 85 +- scripts/fortran_tools/parse_fortran_file.py | 116 +- scripts/framework_env.py | 385 +++ scripts/host_cap.py | 59 +- scripts/host_model.py | 58 +- scripts/metadata2html.py | 2 +- scripts/metadata_parser.py | 2 +- scripts/metadata_table.py | 121 +- scripts/metavar.py | 891 ++----- scripts/mkcap.py | 2 +- scripts/mkdoc.py | 2 +- scripts/mkstatic.py | 2 +- scripts/parse_tools/__init__.py | 2 + scripts/parse_tools/parse_checkers.py | 30 +- scripts/parse_tools/parse_log.py | 2 +- scripts/parse_tools/parse_object.py | 2 +- scripts/parse_tools/parse_source.py | 2 +- scripts/parse_tools/preprocess.py | 2 +- scripts/parse_tools/xml_tools.py | 117 +- scripts/state_machine.py | 4 +- scripts/suite_objects.py | 1908 ++++++++++++++ scripts/var_props.py | 1289 ++++++++++ src/ccpp_constituent_prop_mod.F90 | 354 +-- test/advection_test/cld_ice.meta | 8 +- test/advection_test/cld_liq.meta | 8 +- test/advection_test/run_test | 4 +- test/advection_test/test_host.F90 | 7 +- test/advection_test/test_host.meta | 11 +- test/advection_test/test_host_data.meta | 2 +- test/advection_test/test_reports.py | 4 +- test/capgen_test/environ_conditions.meta | 12 +- test/capgen_test/make_ddt.meta | 12 +- test/capgen_test/run_test | 8 +- test/capgen_test/temp_adjust.meta | 12 +- test/capgen_test/temp_calc_adjust.meta | 12 +- 
test/capgen_test/temp_set.meta | 16 +- test/capgen_test/test_host.F90 | 11 +- test/capgen_test/test_host.meta | 11 +- test/capgen_test/test_host_data.meta | 2 +- test/capgen_test/test_reports.py | 6 +- test/run_tests.sh | 40 + test/unit_tests/sample_files/test_host.meta | 4 +- .../test_multi_ccpp_arg_tables.meta | 12 +- .../CCPPeq1_var_in_fort_meta.meta | 4 +- .../CCPPeq1_var_missing_in_fort.meta | 4 +- .../CCPPeq1_var_missing_in_meta.meta | 4 +- .../CCPPgt1_var_in_fort_meta.meta | 4 +- .../CCPPnotset_var_missing_in_meta.meta | 4 +- .../invalid_dummy_arg.meta | 4 +- .../invalid_subr_stmnt.meta | 4 +- .../sample_scheme_files/mismatch_intent.meta | 12 +- .../missing_arg_table.meta | 8 +- .../missing_fort_header.meta | 12 +- .../sample_scheme_files/reorder.meta | 12 +- .../sample_scheme_files/temp_adjust.meta | 12 +- test/unit_tests/test_metadata_scheme_file.py | 69 +- test/unit_tests/test_metadata_table.py | 153 +- test/unit_tests/test_var_transforms.py | 424 ++++ 75 files changed, 5683 insertions(+), 3661 deletions(-) create mode 100644 scripts/framework_env.py create mode 100644 scripts/suite_objects.py create mode 100644 scripts/var_props.py create mode 100755 test/run_tests.sh create mode 100644 test/unit_tests/test_var_transforms.py diff --git a/.github/workflows/python.yaml b/.github/workflows/python.yaml index 0a05138d..c630c438 100644 --- a/.github/workflows/python.yaml +++ b/.github/workflows/python.yaml @@ -22,6 +22,7 @@ jobs: pip install flake8 pytest if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - name: Test with pytest + if: github.repository == 'NCAR/ccpp-framework' # Only run on main repo run: | export PYTHONPATH=$(pwd)/scripts:$(pwd)/scripts/parse_tools pytest diff --git a/.travis.yml b/.travis.yml index d79caced..fa22d398 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,9 +1,10 @@ language: python python: - - "2.7" - "3.6" - "3.7" + - "3.8" + - "3.9" branches: only: diff --git a/doc/HelloWorld/hello_scheme.meta 
b/doc/HelloWorld/hello_scheme.meta index a5995b7a..6d62d3e9 100644 --- a/doc/HelloWorld/hello_scheme.meta +++ b/doc/HelloWorld/hello_scheme.meta @@ -53,9 +53,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -71,9 +71,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -89,9 +89,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/doc/HelloWorld/hello_world_host.meta b/doc/HelloWorld/hello_world_host.meta index 938ebdc5..0622388f 100644 --- a/doc/HelloWorld/hello_world_host.meta +++ b/doc/HelloWorld/hello_world_host.meta @@ -22,8 +22,8 @@ type = character kind = len=512 [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer diff --git a/doc/HelloWorld/temp_adjust.meta b/doc/HelloWorld/temp_adjust.meta index 2e95195e..702cbd98 100644 --- a/doc/HelloWorld/temp_adjust.meta +++ b/doc/HelloWorld/temp_adjust.meta @@ -40,9 +40,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -58,9 +58,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent 
= out @@ -76,9 +76,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/scripts/ccpp_capgen.py b/scripts/ccpp_capgen.py index 3e9075ba..7a12729a 100755 --- a/scripts/ccpp_capgen.py +++ b/scripts/ccpp_capgen.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 """ Create CCPP parameterization caps, host-model interface code, @@ -10,7 +10,6 @@ from __future__ import unicode_literals from __future__ import print_function -import argparse import sys import os import logging @@ -22,6 +21,7 @@ from file_utils import create_file_list, move_modified_files from file_utils import KINDS_FILENAME, KINDS_MODULE from fortran_tools import parse_fortran_file, FortranWriter +from framework_env import parse_command_line from host_cap import write_host_cap from host_model import HostModel from metadata_table import parse_metadata_file, SCHEME_HEADER_TYPE @@ -34,9 +34,6 @@ ## Init this now so that all Exceptions can be trapped _LOGGER = init_log(os.path.basename(__file__)) -_EPILOG = ''' -''' - ## Recognized Fortran filename extensions _FORTRAN_FILENAME_EXTENSIONS = ['F90', 'f90', 'F', 'f'] @@ -46,71 +43,6 @@ ## Metadata table types where order is significant _ORDERED_TABLE_TYPES = [SCHEME_HEADER_TYPE] -############################################################################### -def parse_command_line(args, description): -############################################################################### - """Create an ArgumentParser to parse and return command-line arguments""" - ap_format = argparse.RawTextHelpFormatter - parser = argparse.ArgumentParser(description=description, - formatter_class=ap_format, epilog=_EPILOG) - - parser.add_argument("--host-files", metavar='', - type=str, required=True, - help="""Comma separated list of host filenames to process -Filenames with a 
'.meta' suffix are treated as host model metadata files -Filenames with a '.txt' suffix are treated as containing a list of .meta -filenames""") - - parser.add_argument("--scheme-files", metavar='', - type=str, required=True, - help="""Comma separated list of scheme filenames to process -Filenames with a '.meta' suffix are treated as scheme metadata files -Filenames with a '.txt' suffix are treated as containing a list of .meta -filenames""") - - parser.add_argument("--suites", metavar='', - type=str, required=True, - help="""Comma separated list of suite definition filenames to process -Filenames with a '.xml' suffix are treated as suite definition XML files -Other filenames are treated as containing a list of .xml filenames""") - - parser.add_argument("--preproc-directives", - metavar='VARDEF1[,VARDEF2 ...]', type=str, default='', - help="Proprocessor directives used to correctly parse source files") - - parser.add_argument("--ccpp-datafile", type=str, - metavar='', - default="datatable.xml", - help="Filename for information on content generated by the CCPP Framework") - - parser.add_argument("--output-root", type=str, - metavar='', - default=os.getcwd(), - help="directory for generated files") - - parser.add_argument("--host-name", type=str, default='', - help='''Name of host model to use in CCPP API -If this option is passed, a host model cap is generated''') - - parser.add_argument("--clean", action='store_true', default=False, - help='Remove files created by this script, then exit') - - parser.add_argument("--kind-phys", type=str, default='REAL64', - metavar="kind_phys", - help='Data size for real(kind_phys) data') - - parser.add_argument("--generate-docfiles", - metavar='HTML | Latex | HTML,Latex', type=str, - help="Generate LaTeX and/or HTML documentation") - - parser.add_argument("--force-overwrite", action='store_true', default=False, - help="""Overwrite all CCPP-generated files, even -if unmodified""") - parser.add_argument("--verbose", action='count', 
default=0, - help="Log more activity, repeat for increased output") - pargs = parser.parse_args(args) - return pargs - ############################################################################### def delete_pathnames_from_file(capfile, logger): ############################################################################### @@ -177,20 +109,26 @@ def find_associated_fortran_file(filename): return fort_filename ############################################################################### -def create_kinds_file(kind_phys, output_dir, logger): +def create_kinds_file(run_env, output_dir): ############################################################################### "Create the kinds.F90 file to be used by CCPP schemes and suites" kinds_filepath = os.path.join(output_dir, KINDS_FILENAME) - if logger is not None: + if run_env.logger is not None: msg = 'Writing {} to {}' - logger.info(msg.format(KINDS_FILENAME, output_dir)) + run_env.logger.info(msg.format(KINDS_FILENAME, output_dir)) # end if + kind_types = run_env.kind_types() with FortranWriter(kinds_filepath, "w", "kinds for CCPP", KINDS_MODULE) as kindf: - use_stmt = 'use ISO_FORTRAN_ENV, only: kind_phys => {}' - kindf.write(use_stmt.format(kind_phys), 1) + for kind_type in kind_types: + use_stmt = "use ISO_FORTRAN_ENV, only: {} => {}" + kindf.write(use_stmt.format(kind_type, + run_env.kind_spec(kind_type)), 1) + # end for kindf.write_preamble() - kindf.write('public kind_phys', 1) + for kind_type in kind_types: + kindf.write("public :: {}".format(kind_type), 1) + # end for # end with return kinds_filepath @@ -498,7 +436,7 @@ def duplicate_item_error(title, filename, itype, orig_item): raise CCPPError(errmsg.format(**edict)) ############################################################################### -def parse_host_model_files(host_filenames, preproc_defs, host_name, logger): +def parse_host_model_files(host_filenames, host_name, run_env): 
############################################################################### """ Gather information from host files (e.g., DDTs, registry) and @@ -507,13 +445,13 @@ def parse_host_model_files(host_filenames, preproc_defs, host_name, logger): header_dict = {} table_dict = {} known_ddts = list() + logger = run_env.logger for filename in host_filenames: logger.info('Reading host model data from {}'.format(filename)) # parse metadata file - mtables = parse_metadata_file(filename, known_ddts, logger) + mtables = parse_metadata_file(filename, known_ddts, run_env) fort_file = find_associated_fortran_file(filename) - ftables = parse_fortran_file(fort_file, preproc_defs=preproc_defs, - logger=logger) + ftables = parse_fortran_file(fort_file, run_env) # Check Fortran against metadata (will raise an exception on error) mheaders = list() for sect in [x.sections() for x in mtables]: @@ -550,11 +488,11 @@ def parse_host_model_files(host_filenames, preproc_defs, host_name, logger): if not host_name: host_name = None # end if - host_model = HostModel(table_dict, host_name, logger) + host_model = HostModel(table_dict, host_name, run_env) return host_model ############################################################################### -def parse_scheme_files(scheme_filenames, preproc_defs, logger): +def parse_scheme_files(scheme_filenames, run_env): ############################################################################### """ Gather information from scheme files (e.g., init, run, and finalize @@ -563,13 +501,13 @@ def parse_scheme_files(scheme_filenames, preproc_defs, logger): table_dict = {} # Duplicate check and for dependencies processing header_dict = {} # To check for duplicates known_ddts = list() + logger = run_env.logger for filename in scheme_filenames: logger.info('Reading CCPP schemes from {}'.format(filename)) # parse metadata file - mtables = parse_metadata_file(filename, known_ddts, logger) + mtables = parse_metadata_file(filename, known_ddts, run_env) 
fort_file = find_associated_fortran_file(filename) - ftables = parse_fortran_file(fort_file, preproc_defs=preproc_defs, - logger=logger) + ftables = parse_fortran_file(fort_file, run_env) # Check Fortran against metadata (will raise an exception on error) mheaders = list() for sect in [x.sections() for x in mtables]: @@ -621,105 +559,95 @@ def clean_capgen(cap_output_file, logger): set_log_level(logger, log_level) ############################################################################### -def capgen(host_files, scheme_files, suites, datatable_file, preproc_defs, - gen_hostcap, gen_docfiles, output_dir, host_name, kind_phys, - force_overwrite, logger): +def capgen(run_env): ############################################################################### """Parse indicated host, scheme, and suite files. Generate code to allow host model to run indicated CCPP suites.""" + ## A few sanity checks + ## Make sure output directory is legit + if os.path.exists(run_env.output_dir): + if not os.path.isdir(run_env.output_dir): + errmsg = "output-root, '{}', is not a directory" + raise CCPPError(errmsg.format(run_env.output_root)) + # end if + if not os.access(run_env.output_dir, os.W_OK): + errmsg = "Cannot write files to output-root ({})" + raise CCPPError(errmsg.format(run_env.output_root)) + # end if (output_dir is okay) + else: + # Try to create output_dir (let it crash if it fails) + os.makedirs(run_env.output_dir) + # end if + host_files = run_env.host_files + host_name = run_env.host_name + scheme_files = run_env.scheme_files # We need to create three lists of files, hosts, schemes, and SDFs - host_files = create_file_list(host_files, ['meta'], 'Host', logger) - scheme_files = create_file_list(scheme_files, ['meta'], 'Scheme', logger) - sdfs = create_file_list(suites, ['xml'], 'Suite', logger) - check_for_writeable_file(datatable_file, "Cap output datatable") + host_files = create_file_list(run_env.host_files, ['meta'], 'Host', + run_env.logger) + scheme_files = 
create_file_list(run_env.scheme_files, ['meta'], + 'Scheme', run_env.logger) + sdfs = create_file_list(run_env.suites, ['xml'], 'Suite', run_env.logger) + check_for_writeable_file(run_env.datatable_file, "Cap output datatable") ##XXgoldyXX: Temporary warning - if gen_docfiles: + if run_env.generate_docfiles: raise CCPPError("--generate-docfiles not yet supported") # end if - # Turn preproc_defs into a dictionary, start with a list to process - if isinstance(preproc_defs, list): - # Someone already handed us a list - preproc_list = preproc_defs - elif (not preproc_defs) or (preproc_defs == 'UNSET'): - # No preprocessor definitions - preproc_list = list() - elif ',' in preproc_defs: - # String of definitions, separated by commas - preproc_list = [x.strip() for x in preproc_defs.split(',')] - else: - # String of definitions, separated by spaces - preproc_list = [x.strip() for x in preproc_defs.split(' ') if x] - # end if - # Turn the list into a dictionary - preproc_defs = {} - for item in preproc_list: - tokens = [x.strip() for x in item.split('=', 1)] - key = tokens[0] - if key[0:2] == '-D': - key = key[2:] - # end if - if len(tokens) > 1: - value = tokens[1] - else: - value = None - # end if - preproc_defs[key] = value - # end for # First up, handle the host files - host_model = parse_host_model_files(host_files, preproc_defs, - host_name, logger) + host_model = parse_host_model_files(host_files, host_name, run_env) # Next, parse the scheme files - scheme_headers, scheme_tdict = parse_scheme_files(scheme_files, - preproc_defs, logger) + scheme_headers, scheme_tdict = parse_scheme_files(scheme_files, run_env) ddts = host_model.ddt_lib.keys() - if ddts: - logger.debug("DDT definitions = {}".format(ddts)) + if ddts and run_env.logger and run_env.logger.isEnabledFor(logging.DEBUG): + run_env.logger.debug("DDT definitions = {}".format(ddts)) # end if plist = host_model.prop_list('local_name') - logger.debug("{} variables = {}".format(host_model.name, plist)) - 
logger.debug("schemes = {}".format([x.title for x in scheme_headers])) + if run_env.logger and run_env.logger.isEnabledFor(logging.DEBUG): + run_env.logger.debug("{} variables = {}".format(host_model.name, plist)) + run_env.logger.debug("schemes = {}".format([x.title + for x in scheme_headers])) # Finally, we can get on with writing suites # Make sure to write to temporary location if files exist in - if not os.path.exists(output_dir): + if not os.path.exists(run_env.output_dir): # Try to create output_dir (let it crash if it fails) - os.makedirs(output_dir) + os.makedirs(run_env.output_dir) # Nothing here, use it for output - outtemp_dir = output_dir - elif not os.listdir(output_dir): + outtemp_dir = run_env.output_dir + elif not os.listdir(run_env.output_dir): # Nothing here, use it for output - outtemp_dir = output_dir + outtemp_dir = run_env.output_dir else: # We need to create a temporary staging area, create it here outtemp_name = "ccpp_temp_scratch_dir" - outtemp_dir = os.path.join(output_dir, outtemp_name) + outtemp_dir = os.path.join(run_env.output_dir, outtemp_name) if os.path.exists(outtemp_dir): remove_dir(outtemp_dir, force=True) # end if os.makedirs(outtemp_dir) # end if - ccpp_api = API(sdfs, host_model, scheme_headers, logger) - cap_filenames = ccpp_api.write(outtemp_dir, logger) - if gen_hostcap: + ccpp_api = API(sdfs, host_model, scheme_headers, run_env) + cap_filenames = ccpp_api.write(outtemp_dir, run_env) + if run_env.generate_host_cap: # Create a cap file - host_files = [write_host_cap(host_model, ccpp_api, outtemp_dir, logger)] + host_files = [write_host_cap(host_model, ccpp_api, + outtemp_dir, run_env)] else: host_files = list() # end if # Create the kinds file - kinds_file = create_kinds_file(kind_phys, outtemp_dir, logger) + kinds_file = create_kinds_file(run_env, outtemp_dir) # Move any changed files to output_dir and remove outtemp_dir - move_modified_files(outtemp_dir, output_dir, - overwrite=force_overwrite, remove_src=True) + 
move_modified_files(outtemp_dir, run_env.output_dir, + overwrite=run_env.force_overwrite, remove_src=True) # We have to rename the files we created - if outtemp_dir != output_dir: - replace_paths(cap_filenames, outtemp_dir, output_dir) - replace_paths(host_files, outtemp_dir, output_dir) - kinds_file = kinds_file.replace(outtemp_dir, output_dir) + if outtemp_dir != run_env.output_dir: + replace_paths(cap_filenames, outtemp_dir, run_env.output_dir) + replace_paths(host_files, outtemp_dir, run_env.output_dir) + kinds_file = kinds_file.replace(outtemp_dir, run_env.output_dir) # end if # Finally, create the database of generated files and caps # This can be directly in output_dir because it will not affect dependencies src_dir = os.path.join(__FRAMEWORK_ROOT, "src") - generate_ccpp_datatable(datatable_file, host_model, ccpp_api, + generate_ccpp_datatable(run_env, host_model, ccpp_api, scheme_headers, scheme_tdict, host_files, cap_filenames, kinds_file, src_dir) @@ -728,50 +656,16 @@ def _main_func(): ############################################################################### """Parse command line, then parse indicated host, scheme, and suite files. 
Finally, generate code to allow host model to run indicated CCPP suites.""" - args = parse_command_line(sys.argv[1:], __doc__) - verbosity = args.verbose - if verbosity > 1: - set_log_level(_LOGGER, logging.DEBUG) - elif verbosity > 0: - set_log_level(_LOGGER, logging.INFO) - # end if - # Make sure we know where output is going - output_dir = os.path.abspath(args.output_root) - if os.path.abspath(args.ccpp_datafile): - datatable_file = args.ccpp_datafile - else: - datatable_file = os.path.abspath(os.path.join(output_dir, - args.ccpp_datafile)) - # end if - ## A few sanity checks - ## Make sure output directory is legit - if os.path.exists(output_dir): - if not os.path.isdir(output_dir): - errmsg = "output-root, '{}', is not a directory" - raise CCPPError(errmsg.format(args.output_root)) - # end if - if not os.access(output_dir, os.W_OK): - errmsg = "Cannot write files to output-root ({})" - raise CCPPError(errmsg.format(args.output_root)) - # end if (output_dir is okay) - else: - # Try to create output_dir (let it crash if it fails) - os.makedirs(output_dir) - # end if - # Make sure we can create output file lists - if not os.path.isabs(datatable_file): - datatable_file = os.path.normpath(os.path.join(output_dir, - datatable_file)) + framework_env = parse_command_line(sys.argv[1:], __doc__, logger=_LOGGER) + if framework_env.verbosity > 1: + set_log_level(framework_env.logger, logging.DEBUG) + elif framework_env.verbosity > 0: + set_log_level(framework_env.logger, logging.INFO) # end if - if args.clean: - clean_capgen(datatable_file, _LOGGER) + if framework_env.clean: + clean_capgen(framework_env.datatable_file, framework_env.logger) else: - generate_host_cap = args.host_name != '' - preproc_defs = args.preproc_directives - capgen(args.host_files, args.scheme_files, args.suites, datatable_file, - preproc_defs, generate_host_cap, - args.generate_docfiles, output_dir, args.host_name, - args.kind_phys, args.force_overwrite, _LOGGER) + capgen(framework_env) # end if 
(clean) ############################################################################### diff --git a/scripts/ccpp_datafile.py b/scripts/ccpp_datafile.py index b25df8a8..158d9dec 100755 --- a/scripts/ccpp_datafile.py +++ b/scripts/ccpp_datafile.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 """Code to generate and query the CCPP datafile returned by capgen. The CCPP datafile is a database consisting of several tables: @@ -17,15 +17,15 @@ # Python library imports import argparse +import logging import os -import re import sys import xml.etree.ElementTree as ET # CCPP framework imports -from ccpp_suite import VerticalLoop, Subcycle -from parse_tools import read_xml_file from metadata_table import UNKNOWN_PROCESS_TYPE from metavar import Var +from parse_tools import read_xml_file, PrettyElementTree +from suite_objects import VerticalLoop, Subcycle # Find python version PY3 = sys.version_info[0] > 2 @@ -33,9 +33,6 @@ # Global data _INDENT_STR = " " -beg_tag_re = re.compile(r"([<][^/][^<>]*[^/][>])") -end_tag_re = re.compile(r"([<][/][^<>/]+[>])") -simple_tag_re = re.compile(r"([<][^/][^<>/]+[/][>])") ## datatable_report must have an action for each report type _VALID_REPORTS = [{"report" : "host_files", "type" : bool, @@ -87,6 +84,13 @@ class CCPPDatatableError(ValueError): """Error specific to errors found in the CCPP capgen datafile""" pass +class DatatableInternalError(ValueError): + """Error class for reporting internal errors""" + def __init__(self, message): + """Initialize this exception""" + logging.shutdown() + super(DatatableInternalError, self).__init__(message) + class DatatableReport(object): """A class to hold a database report type and inquiry function""" @@ -121,96 +125,6 @@ def valid_actions(cls): """Return the list of valid actions for this class""" return cls.__valid_actions -class PrettyElementTree(ET.ElementTree): - """An ElementTree subclass with nice formatting when writing to a file""" - - def __init__(self, element=None, 
file=None): - """Initialize a PrettyElementTree object""" - super(PrettyElementTree, self).__init__(element, file) - - def _write(self, outfile, line, indent, eol=os.linesep): - """Write as an ASCII string to """ - outfile.write('{}{}{}'.format(_INDENT_STR*indent, line, eol)) - - def write(self, file, encoding="us-ascii", xml_declaration=None, - default_namespace=None, method="xml", - short_empty_elements=True): - """Subclassed write method to format output.""" - if PY3 and (PYSUBVER >= 4): - if PYSUBVER >= 8: - input = ET.tostring(self.getroot(), - encoding=encoding, method=method, - xml_declaration=xml_declaration, - default_namespace=default_namespace, - short_empty_elements=short_empty_elements) - else: - input = ET.tostring(self.getroot(), - encoding=encoding, method=method, - short_empty_elements=short_empty_elements) - # end if - else: - input = ET.tostring(self.getroot(), - encoding=encoding, method=method) - # end if - if PY3: - fmode = 'wt' - root = str(input, encoding="utf-8") - else: - fmode = 'w' - root = input - # end if - indent = 0 - last_write_text = False - with open(file, fmode) as outfile: - inline = root.strip() - istart = 0 # Current start pos - iend = len(inline) - while istart < iend: - bmatch = beg_tag_re.match(inline[istart:]) - ematch = end_tag_re.match(inline[istart:]) - smatch = simple_tag_re.match(inline[istart:]) - if bmatch is not None: - outstr = bmatch.group(1) - if inline[istart + len(bmatch.group(1))] != '<': - # Print text on same line - self._write(outfile, outstr, indent, eol='') - else: - self._write(outfile, outstr, indent) - # end if - indent += 1 - istart += len(outstr) - last_write_text = False - elif ematch is not None: - outstr = ematch.group(1) - indent -= 1 - if last_write_text: - self._write(outfile, outstr, 0) - last_write_text = False - else: - self._write(outfile, outstr, indent) - # end if - istart += len(outstr) - elif smatch is not None: - outstr = smatch.group(1) - self._write(outfile, outstr, indent) - istart 
+= len(outstr) - last_write_text = False - else: - # No tag, just output text - end_index = inline[istart:].find('<') - if end_index < 0: - end_index = iend - else: - end_index += istart - # end if - outstr = inline[istart:end_index] - self._write(outfile, outstr.strip(), 0, eol='') - last_write_text = True - istart += len(outstr) - # end if - # end while - # end with - ### ### Interface for retrieving datatable information ### @@ -740,9 +654,10 @@ def _new_var_entry(parent, var, full_entry=True): """ prop_list = ["intent"] if full_entry: - prop_list.extend(["local_name", "type", "kind", "units", + prop_list.extend(["allocatable", "active", "default_value", "diagnostic_name", "diagnostic_name_fixed", - "default_value", "protected"]) + "kind", "persistence", "polymorphic", "protected", + "state_variable", "type", "units"]) prop_list.extend(Var.constituent_property_names()) # end if ventry = ET.SubElement(parent, "var") @@ -929,7 +844,7 @@ def _add_suite_object(parent, suite_object): # end for ############################################################################### -def generate_ccpp_datatable(filename, host_model, api, scheme_headers, +def generate_ccpp_datatable(run_env, host_model, api, scheme_headers, scheme_tdict, host_files, suite_files, ccpp_kinds, source_dir): ############################################################################### @@ -1007,7 +922,7 @@ def generate_ccpp_datatable(filename, host_model, api, scheme_headers, _add_dependencies(datatable, scheme_depends, host_depends) # Write tree datatable_tree = PrettyElementTree(datatable) - datatable_tree.write(filename) + datatable_tree.write(run_env.datatable_file) ############################################################################### diff --git a/scripts/ccpp_fortran_to_metadata.py b/scripts/ccpp_fortran_to_metadata.py index 155cea87..8fc1d682 100755 --- a/scripts/ccpp_fortran_to_metadata.py +++ b/scripts/ccpp_fortran_to_metadata.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python 
+#!/usr/bin/env python3 #pylint: disable=anomalous-backslash-in-string """ @@ -35,6 +35,7 @@ import os.path import logging # CCPP framework imports +from framework_env import CCPPFrameworkEnv from parse_tools import init_log, set_log_level from parse_tools import CCPPError, ParseInternalError from parse_tools import reset_standard_name_counter, unique_standard_name @@ -127,7 +128,7 @@ def write_metadata_file(mfilename, ftables, sep): tprop = var.get_prop_value('type') kprop = var.get_prop_value('kind') if tprop == kprop: - outfile.write(' ddt_type = {}'.format(tprop)) + outfile.write(' type = {}'.format(tprop)) else: outfile.write(' type = {}'.format(tprop.lower())) if kprop: @@ -166,7 +167,7 @@ def write_metadata_file(mfilename, ftables, sep): # end with ############################################################################### -def parse_fortran_files(filenames, preproc_defs, output_dir, sep, logger): +def parse_fortran_files(filenames, run_env, output_dir, sep, logger): ############################################################################### """ Parse each file in and produce a prototype metadata file @@ -176,8 +177,7 @@ def parse_fortran_files(filenames, preproc_defs, output_dir, sep, logger): for filename in filenames: logger.info('Looking for arg_tables from {}'.format(filename)) reset_standard_name_counter() - ftables = parse_fortran_file(filename, preproc_defs=preproc_defs, - logger=logger) + ftables = parse_fortran_file(filename, run_env) # Create metadata filename filepath = '.'.join(os.path.basename(filename).split('.')[0:-1]) fname = filepath + '.meta' @@ -225,7 +225,10 @@ def _main_func(): os.makedirs(output_dir) # end if # Parse the files and create metadata - _ = parse_fortran_files(fort_files, preproc_defs, + run_env = CCPPFrameworkEnv(_LOGGER, verbose=verbosity, + host_files="", scheme_files="", suites="", + preproc_directives=preproc_defs) + _ = parse_fortran_files(fort_files, run_env, output_dir, section_sep, _LOGGER) 
############################################################################### diff --git a/scripts/ccpp_prebuild.py b/scripts/ccpp_prebuild.py index 863e4225..a14f1af7 100755 --- a/scripts/ccpp_prebuild.py +++ b/scripts/ccpp_prebuild.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # Standard modules import argparse diff --git a/scripts/ccpp_suite.py b/scripts/ccpp_suite.py index a84cf037..4fca8e05 100644 --- a/scripts/ccpp_suite.py +++ b/scripts/ccpp_suite.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # """Classes and methods to create a Fortran suite-implementation file @@ -6,7 +6,7 @@ # Python library imports import os.path -import re +import logging import xml.etree.ElementTree as ET # CCPP framework imports from ccpp_state_machine import CCPP_STATE_MACH, RUN_PHASE_NAME @@ -15,11 +15,14 @@ from ddt_library import DDTLibrary from file_utils import KINDS_MODULE from fortran_tools import FortranWriter -from metavar import Var, VarDictionary, VarLoopSubst, ccpp_standard_var +from framework_env import CCPPFrameworkEnv +from metavar import Var, VarDictionary, ccpp_standard_var from metavar import CCPP_CONSTANT_VARS, CCPP_LOOP_VAR_STDNAMES from parse_tools import ParseContext, ParseSource, context_string from parse_tools import ParseInternalError, CCPPError from parse_tools import read_xml_file, validate_xml_file, find_schema_version +from parse_tools import init_log, set_log_to_null +from suite_objects import CallList, Group, Scheme # pylint: disable=too-many-lines @@ -27,1862 +30,30 @@ # Module (global) variables ############################################################################### -_OBJ_LOC_RE = re.compile(r"(0x[0-9A-Fa-f]+)>") -_BLANK_DIMS_RE = re.compile(r"[(][:](,:)*[)]$") - # Source for internally generated variables. 
_API_SOURCE_NAME = "CCPP_API" # Use the constituent source type for consistency _API_SUITE_VAR_NAME = ConstituentVarDict.constitutent_source_type() -_API_GROUP_VAR_NAME = "group" _API_SCHEME_VAR_NAME = "scheme" -_API_LOCAL_VAR_NAME = "local" -_API_LOCAL_VAR_TYPES = [_API_LOCAL_VAR_NAME, _API_SUITE_VAR_NAME] _API_CONTEXT = ParseContext(filename="ccpp_suite.py") _API_SOURCE = ParseSource(_API_SOURCE_NAME, _API_SCHEME_VAR_NAME, _API_CONTEXT) -_API_LOCAL = ParseSource(_API_SOURCE_NAME, _API_LOCAL_VAR_NAME, _API_CONTEXT) -_API_GROUP = ParseSource(_API_SOURCE_NAME, _API_GROUP_VAR_NAME, _API_CONTEXT) -_API_TIMESPLIT_TAG = 'time_split' -_API_PROCESSSPLIT_TAG = 'process_split' +_API_LOGGING = init_log('ccpp_suite') +set_log_to_null(_API_LOGGING) +_API_DUMMY_RUN_ENV = CCPPFrameworkEnv(_API_LOGGING, + ndict={'host_files':'', + 'scheme_files':'', + 'suites':''}) # Required variables for inclusion in auto-generated schemes -CCPP_REQUIRED_VARS = [ccpp_standard_var('ccpp_error_flag', +CCPP_REQUIRED_VARS = [ccpp_standard_var('ccpp_error_code', _API_SCHEME_VAR_NAME, + _API_DUMMY_RUN_ENV, context=_API_CONTEXT), ccpp_standard_var('ccpp_error_message', _API_SCHEME_VAR_NAME, + _API_DUMMY_RUN_ENV, context=_API_CONTEXT)] -############################################################################### -def new_suite_object(item, context, parent, logger): -############################################################################### - "'Factory' method to create the appropriate suite object from XML" - new_item = None - if item.tag == 'subcycle': - new_item = Subcycle(item, context, parent, logger) - elif item.tag == 'scheme': - new_item = Scheme(item, context, parent, logger) - elif item.tag == _API_TIMESPLIT_TAG: - new_item = TimeSplit(item, context, parent, logger) - else: - raise CCPPError("Unknown CCPP suite element type, '{}'".format(item.tag)) - # end if - return new_item - -############################################################################### - -class 
CallList(VarDictionary): - """A simple class to hold a routine's call list (dummy arguments)""" - - def __init__(self, name, routine=None, logger=None): - """Initialize this call list. - is the name of this dictionary. - is a pointer to the routine for which this is a call list - or None for a routine that is not a SuiteObject. - """ - self.__routine = routine - super(CallList, self).__init__(name, logger=logger) - - def add_vars(self, call_list, gen_unique=False): - """Add new variables from another CallList ()""" - for var in call_list.variable_list(): - stdname = var.get_prop_value('standard_name') - if stdname not in self: - self.add_variable(var, gen_unique=gen_unique) - # end if - # end for - - def call_string(self, cldicts=None, is_func_call=False, subname=None): - """Return a dummy argument string for this call list. - may be a list of VarDictionary objects to search for - local_names (default is to use self). - should be set to True to construct a call statement. - If is False, construct a subroutine dummy argument - list. 
- """ - arg_str = "" - arg_sep = "" - for var in self.variable_list(): - # Do not include constants - stdname = var.get_prop_value('standard_name') - if stdname not in CCPP_CONSTANT_VARS: - # Find the dummy argument name - dummy = var.get_prop_value('local_name') - # Now, find the local variable name - if cldicts is not None: - for cldict in cldicts: - dvar = cldict.find_variable(standard_name=stdname, - any_scope=False) - if dvar is not None: - break - # end if - # end for - if dvar is None: - if subname is not None: - errmsg = "{}: ".format(subname) - else: - errmsg = "" - # end if - errmsg += "'{}', not found in call list for '{}'" - clnames = [x.name for x in cldicts] - raise CCPPError(errmsg.format(stdname, clnames)) - # end if - lname = dvar.get_prop_value('local_name') - else: - cldict = None - aref = var.array_ref(local_name=dummy) - if aref is not None: - lname = aref.group(1) - else: - lname = dummy - # end if - # end if - if is_func_call: - if cldicts is not None: - use_dicts = cldicts - else: - use_dicts = [self] - # end if - run_phase = self.routine.run_phase() - # We only need dimensions for suite variables in run phase - need_dims = SuiteObject.is_suite_variable(dvar) and run_phase - vdims = var.call_dimstring(var_dicts=use_dicts, - explicit_dims=need_dims, - loop_subst=run_phase) - if _BLANK_DIMS_RE.match(vdims) is None: - lname = lname + vdims - # end if - # end if - if is_func_call: - arg_str += "{}{}={}".format(arg_sep, dummy, lname) - else: - arg_str += "{}{}".format(arg_sep, lname) - # end if - arg_sep = ", " - # end if - # end for - return arg_str - - @property - def routine(self): - """Return the routine for this call list (or None)""" - return self.__routine - -############################################################################### - -class SuiteObject(VarDictionary): - """Base class for all CCPP Suite objects (e.g., Scheme, Subcycle) - SuiteObjects have an internal dictionary for variables created for - execution of the SuiteObject. 
These variables will be allocated and - managed at the Group level (unless cross-group usage or persistence - requires handling at the Suite level). - SuiteObjects also have a call list which is a list of variables which - are passed to callable SuiteObjects (e.g., Scheme). - """ - - def __init__(self, name, context, parent, logger, - active_call_list=False, variables=None, phase_type=None): - # pylint: disable=too-many-arguments - self.__name = name - self.__context = context - self.__logger = logger - self.__parent = parent - if active_call_list: - self.__call_list = CallList(name + '_call_list', routine=self, - logger=logger) - else: - self.__call_list = None - # end if - self.__parts = list() - self.__needs_vertical = None - self.__needs_horizontal = None - self.__phase_type = phase_type - # Initialize our dictionary - super(SuiteObject, self).__init__(self.name, variables=variables, - parent_dict=parent, logger=logger) - - def declarations(self): - """Return a list of local variables to be declared in parent Group - or Suite. By default, this list is the object's embedded VarDictionary. - """ - return self.variable_list() - - def add_part(self, item, replace=False): - """Add an object (e.g., Scheme, Subcycle) to this SuiteObject. - If needs to be in a VerticalLoop, look for an appropriate - VerticalLoop object or create one. - if is True, replace in its current position in self. - Note that if is not to be inserted in a VerticalLoop, - has no effect. - """ - if replace: - if item in self.__parts: - index = self.__parts.index(item) - else: - emsg = 'Cannot replace {} in {}, not a member' - raise ParseInternalError(emsg.format(item.name, self.name)) - # end if - else: - if item in self.__parts: - emsg = 'Cannot add {} to {}, already a member' - raise ParseInternalError(emsg.format(item.name, self.name)) - # end if - index = len(self.__parts) - # end if - # Does this item need to be in a VerticalLoop? 
- if item.needs_vertical is not None: - iparent = item.parent - if isinstance(self, VerticalLoop): - # It is being added to a VerticalLoop, call it good - pass - elif isinstance(iparent, VerticalLoop): - # Why are we doing this? - emsg = ('Trying to add {} {} to {} {} but it is already ' - 'in VerticalLoop {}') - raise ParseInternalError(emsg.format(item.__class__.__name__, - item.name, - self.__class__.__name__, - self.name, iparent.name)) - else: - pitem = iparent.part(-1, error=False) - added = False - if isinstance(pitem, VerticalLoop): - # Can we attach item to this loop? - if pitem.dimension_name == item.needs_vertical: - pitem.add_part(item) - if replace: - self.remove_part(index) - # end if (no else, we already added it) - added = True - # end if - # end if - if not added: - # Need to add item to a new VerticalLoop - # We are in the process of providing the vertical coord - vert_index = item.needs_vertical - item.needs_vertical = None - new_vl = VerticalLoop(vert_index, self.__context, - self, self.__logger, items=[item]) - if replace: - self.remove_part(index) - # end if (no else, adding the loop below) - self.__parts.insert(index, new_vl) - item.reset_parent(new_vl) - # end if - # end if - else: - # Just add - self.__parts.insert(index, item) - item.reset_parent(self) - # end if - - def remove_part(self, index): - """Remove the SuiteObject part at index""" - plen = len(self.__parts) - if (0 <= index < plen) or (abs(index) <= plen): - del self.__parts[index] - else: - errmsg = "Invalid index for remove_part, {}, ".format(index) - if plen > 0: - errmsg += "SuiteObject only has {} parts".format(plen) - else: - errmsg += "SuiteObject only has no parts" - raise ParseInternalError(errmsg, context=self.__context) - # end if - - def schemes(self): - """Return a flattened list of schemes for this SuiteObject""" - schemes = list() - for item in self.__parts: - schemes.extend(item.schemes()) - # end for - return schemes - - def move_part(self, part, source_object, 
loc=-1): - """Operator to move from to . - If is -1, is appended to , - otherwise, is inserted at . - """ - if part in source_object.parts: - # Sanitize loc - try: - iloc = int(loc) - except ValueError: - errmsg = "Invalid loc value for move_part, {}".format(loc) - raise ParseInternalError(errmsg, context=self.__context) - # end try - if iloc == -1: - self.__parts.append(part) - else: - self.__parts.insert(iloc, part) - # end if - index = source_object.index(part) - source_object.remove_part(index) - # now has a new parent - part.reset_parent(self) - - def reset_parent(self, new_parent): - """Reset the parent of this SuiteObject (which has been moved)""" - self.__parent = new_parent - - def phase(self): - """Return the CCPP state phase_type for this SuiteObject""" - trans = self.phase_type - if trans is None: - if self.parent is not None: - trans = self.parent.phase() - else: - trans = False - # end if - # end if - return trans - - def run_phase(self): - """Return True iff this SuiteObject is in a run phase group""" - return self.phase() == RUN_PHASE_NAME - - def timestep_phase(self): - '''Return True iff this SuiteObject is in a timestep initial or - timestep final phase group''' - phase = self.phase() - return (phase is not None) and ('timestep' in phase) - - def register_action(self, vaction): - """Register (i.e., save information for processing during write stage) - and return True or pass up to the parent of - . Return True if any level registers , False otherwise. - The base class will not register any action, it must be registered in - an override of this method. 
- """ - if self.parent is not None: - return self.parent.register_action(vaction) - # end if - return False - - @classmethod - def is_suite_variable(cls, var): - """Return True iff belongs to our Suite""" - return var and (var.source.type == _API_SUITE_VAR_NAME) - - def is_local_variable(self, var): - """Return the local variable matching if one is found belonging - to this object or any of its SuiteObject parents.""" - stdname = var.get_prop_value('standard_name') - lvar = None - obj = self - while (not lvar) and (obj is not None) and isinstance(obj, SuiteObject): - lvar = obj.find_variable(standard_name=stdname, any_scope=False, - search_call_list=False) - if not lvar: - obj = obj.parent - # end if - # end while - return lvar - - def add_call_list_variable(self, newvar, exists_ok=False, gen_unique=False, - subst_dict=None): - """Add to this SuiteObject's call_list. If this SuiteObject - does not have a call list, recursively try the SuiteObject's parent - If is not None, create a clone using that as a dictionary - of substitutions. - Do not add if it exists as a local variable. - Do not add if it is a suite variable""" - stdname = newvar.get_prop_value('standard_name') - if self.parent: - pvar = self.parent.find_variable(standard_name=stdname, - source_var=newvar, - any_scope=False) - else: - pvar = None - # end if - if SuiteObject.is_suite_variable(pvar): - pass # Do not add suite variable to a call list - elif self.is_local_variable(newvar): - pass # Do not add to call list, it is owned by a SuiteObject - elif self.call_list is not None: - if (stdname in CCPP_LOOP_VAR_STDNAMES) and (not self.run_phase()): - errmsg = 'Attempting to use loop variable {} in {} phase' - raise CCPPError(errmsg.format(stdname, self.phase())) - # end if - # Do we need a clone? 
- if isinstance(self, Group): - stype = _API_GROUP_VAR_NAME - else: - stype = None - # end if - if stype or subst_dict: - oldvar = newvar - if subst_dict is None: - subst_dict = {} - # end if - # Make sure that this variable has an intent - if ((oldvar.get_prop_value("intent") is None) and - ("intent" not in subst_dict)): - subst_dict["intent"] = "in" - # end if - newvar = oldvar.clone(subst_dict, source_name=self.name, - source_type=stype, context=self.context) - # end if - self.call_list.add_variable(newvar, exists_ok=exists_ok, - gen_unique=gen_unique, - adjust_intent=True) - # We need to make sure that this variable's dimensions are available - for vardim in newvar.get_dim_stdnames(include_constants=False): - dvar = self.find_variable(standard_name=vardim, - any_scope=True) - if dvar is None: - emsg = "{}: Could not find dimension {} in {}" - raise ParseInternalError(emsg.format(self.name, - stdname, vardim)) - # end if - elif self.parent is None: - errmsg = 'No call_list found for {}'.format(newvar) - raise ParseInternalError(errmsg) - elif pvar: - # Check for call list incompatibility - if pvar is not None: - compat, reason = pvar.compatible(newvar) - if not compat: - emsg = 'Attempt to add incompatible variable to call list:' - emsg += '\n{} from {} is not compatible with {} from {}' - nlreason = newvar.get_prop_value(reason) - plreason = pvar.get_prop_value(reason) - emsg += '\nreason = {} ({} != {})'.format(reason, - nlreason, - plreason) - nlname = newvar.get_prop_value('local_name') - plname = pvar.get_prop_value('local_name') - raise CCPPError(emsg.format(nlname, newvar.source.name, - plname, pvar.source.name)) - # end if - # end if (no else, variable already in call list) - else: - self.parent.add_call_list_variable(newvar, exists_ok=exists_ok, - gen_unique=gen_unique, - subst_dict=subst_dict) - # end if - - def add_variable_to_call_tree(self, var, vmatch=None, subst_dict=None): - """Add to 's call_list (or a parent if does not - have an active 
call_list). - If is not None, also add the loop substitution variables - which must be present. - If is not None, create a clone using that as a dictionary - of substitutions. - """ - found_dims = False - if var is not None: - self.add_call_list_variable(var, exists_ok=True, gen_unique=True, - subst_dict=subst_dict) - found_dims = True - # end if - if vmatch is not None: - svars = vmatch.has_subst(self, any_scope=True) - if svars is None: - found_dims = False - else: - found_dims = True - for svar in svars: - self.add_call_list_variable(svar, exists_ok=True) - # end for - # Register the action (probably at Group level) - self.register_action(vmatch) - # end if - # end if - return found_dims - - def vert_dim_match(self, vloop_subst): - """If self is or is a part of a VerticalLoop object for - the substitute index for , return the substitute - loop index standard name, otherwise, return None. - """ - dim_match = None - parent = self - if len(vloop_subst.required_stdnames) != 1: - errmsg = 'vert_dim_match can only handle one substitute index' - raise ParseInternalError(errmsg) - # end if - index_dim = vloop_subst.required_stdnames[0] - while parent is not None: - if isinstance(parent, VerticalLoop) and (parent.name == index_dim): - dim_match = index_dim - break - # end if - parent = parent.parent - # end for - return dim_match - - def horiz_dim_match(self, ndim, hdim, nloop_subst): - """Find a match between and , if they are both - horizontal dimensions. - If == , return . - If is not None and its required standard names exist - in our extended dictionary, return them. - Otherwise, return None. - NB: Loop substitutions are only allowed during the run phase but in - other phases, horizontal_dimension and horizontal_loop_extent - are the same. 
- """ - dim_match = None - nis_hdim = Var.is_horizontal_dimension(ndim) - his_hdim = Var.is_horizontal_dimension(hdim) - if nis_hdim and his_hdim: - if ndim == hdim: - dim_match = ndim - elif self.run_phase() and (nloop_subst is not None): - svars = nloop_subst.has_subst(self, any_scope=True) - match = svars is not None - if match: - if isinstance(self, Scheme): - obj = self.parent - else: - obj = self - # end if - for svar in svars: - obj.add_call_list_variable(svar, exists_ok=True) - # end for - dim_match = ':'.join(nloop_subst.required_stdnames) - # end if - elif not self.run_phase(): - if ((hdim == 'ccpp_constant_one:horizontal_dimension') and - (ndim == 'ccpp_constant_one:horizontal_loop_extent')): - dim_match = hdim - elif ((hdim == 'ccpp_constant_one:horizontal_dimension') and - (ndim == 'horizontal_loop_begin:horizontal_loop_end')): - dim_match = hdim - # end if (no else, there is no non-run-phase match) - # end if (no else, there is no match) - # end if (no else, there is no match) - return dim_match - - @staticmethod - def dim_match(need_dim, have_dim): - """Test whether matches . - If they match, return the matching dimension (which may be - modified by, e.g., a loop substitution). - If they do not match, return None. - """ - match = None - # First, try for all the marbles - if need_dim == have_dim: - match = need_dim - # end if - # Is one side missing a one start? - if not match: - ndims = need_dim.split(':') - hdims = have_dim.split(':') - if len(ndims) > len(hdims): - if ndims[0].lower == 'ccpp_constant_one': - ndims = ndims[1:] - elif hdims[0].lower == 'ccpp_constant_one': - hdims = hdims[1:] - # end if (no else) - # Last try - match = ndims == hdims - # end if - # end if - - return match - - def match_dimensions(self, need_dims, have_dims): - """Compare dimensions between and . - Return 6 items: - 1) Return True if all dims match. 
- If has a vertical dimension and does not - but all other dimensions match, return False but include the - missing dimension index as the third return value. - 2) Return modified, if necessary to - reflect the available limits. - 3) Return have_dims modified, if necessary to reflect - any loop substitutions. If no substitutions, return None - This is done so that the correct dimensions are used in the host cap. - 4) Return the name of the missing vertical index, or None - 5) Return a permutation array if the dimension ordering is - different (or None if the ordering is the same). Each element of the - permutation array is the index in for that dimension of - . - 6) Finally, return a 'reason' string. If match (first return value) is - False, this string will contain information about the reason for - the match failure. - >>> SuiteObject('foo', _API_CONTEXT, None, None).match_dimensions(['horizontal_loop_extent'], ['horizontal_loop_extent']) - (True, ['horizontal_loop_extent'], ['horizontal_loop_extent'], None, None, '') - >>> SuiteObject('foo', _API_CONTEXT,None, None,variables=[Var({'local_name':'beg','standard_name':'horizontal_loop_begin','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL),Var({'local_name':'end','standard_name':'horizontal_loop_end','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL)],active_call_list=True,phase_type='initialize').match_dimensions(['ccpp_constant_one:horizontal_loop_extent'], ['ccpp_constant_one:horizontal_dimension']) - (True, ['ccpp_constant_one:horizontal_dimension'], ['ccpp_constant_one:horizontal_dimension'], None, None, '') - >>> SuiteObject('foo', _API_CONTEXT,None,None,variables=[Var({'local_name':'beg','standard_name':'horizontal_loop_begin','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL),Var({'local_name':'end','standard_name':'horizontal_loop_end','units':'count','dimensions':'()','type':'integer'}, 
_API_LOCAL)],active_call_list=True,phase_type=RUN_PHASE_NAME).match_dimensions(['ccpp_constant_one:horizontal_loop_extent'], ['horizontal_loop_begin:horizontal_loop_end']) - (True, ['horizontal_loop_begin:horizontal_loop_end'], ['horizontal_loop_begin:horizontal_loop_end'], None, None, '') - >>> SuiteObject('foo', _API_CONTEXT,None,None,variables=[Var({'local_name':'beg','standard_name':'horizontal_loop_begin','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL),Var({'local_name':'end','standard_name':'horizontal_loop_end','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL),Var({'local_name':'lev','standard_name':'vertical_layer_dimension','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL)],active_call_list=True,phase_type=RUN_PHASE_NAME).match_dimensions(['ccpp_constant_one:horizontal_loop_extent'], ['horizontal_loop_begin:horizontal_loop_end','ccpp_constant_one:vertical_layer_dimension']) - (False, ['horizontal_loop_begin:horizontal_loop_end', 'vertical_layer_index'], ['horizontal_loop_begin:horizontal_loop_end', 'ccpp_constant_one:vertical_layer_dimension'], 'vertical_layer_index', None, 'missing vertical dimension') - >>> SuiteObject('foo', _API_CONTEXT,None,None,variables=[Var({'local_name':'beg','standard_name':'horizontal_loop_begin','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL),Var({'local_name':'end','standard_name':'horizontal_loop_end','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL),Var({'local_name':'lev','standard_name':'vertical_layer_dimension','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL)],active_call_list=True,phase_type=RUN_PHASE_NAME).match_dimensions(['ccpp_constant_one:horizontal_loop_extent','ccpp_constant_one:vertical_layer_dimension'], ['horizontal_loop_begin:horizontal_loop_end','ccpp_constant_one:vertical_layer_dimension']) - (True, ['horizontal_loop_begin:horizontal_loop_end', 'ccpp_constant_one:vertical_layer_dimension'], 
['horizontal_loop_begin:horizontal_loop_end', 'ccpp_constant_one:vertical_layer_dimension'], None, None, '') - >>> SuiteObject('foo', _API_CONTEXT,None,None,variables=[Var({'local_name':'beg','standard_name':'horizontal_loop_begin','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL),Var({'local_name':'end','standard_name':'horizontal_loop_end','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL),Var({'local_name':'lev','standard_name':'vertical_layer_dimension','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL)],active_call_list=True,phase_type=RUN_PHASE_NAME).match_dimensions(['ccpp_constant_one:horizontal_loop_extent','ccpp_constant_one:vertical_layer_dimension'], ['ccpp_constant_one:vertical_layer_dimension','horizontal_loop_begin:horizontal_loop_end']) - (True, ['horizontal_loop_begin:horizontal_loop_end', 'ccpp_constant_one:vertical_layer_dimension'], ['ccpp_constant_one:vertical_layer_dimension', 'horizontal_loop_begin:horizontal_loop_end'], None, [1, 0], '') - """ - new_need_dims = [] - new_have_dims = list(have_dims) - perm = [] - match = True - missing_vert_dim = None - reason = '' - nlen = len(need_dims) - hlen = len(have_dims) - _, nvdim_index = Var.find_vertical_dimension(need_dims) - _, hvdim_index = Var.find_vertical_dimension(have_dims) - _, nhdim_index = Var.find_horizontal_dimension(need_dims) - _, hhdim_index = Var.find_horizontal_dimension(have_dims) - if hhdim_index < 0 <= nhdim_index: - match = False - nlen = 0 # To skip logic below - hlen = 0 # To skip logic below - reason = '{hname}{hctx} is missing a horizontal dimension ' - reason += 'required by {nname}{nctx}' - # end if - for nindex in range(nlen): - neddim = need_dims[nindex] - if nindex == nhdim_index: - # Look for a horizontal dimension match - vmatch = VarDictionary.loop_var_match(neddim) - hmatch = self.horiz_dim_match(neddim, have_dims[hhdim_index], - vmatch) - if hmatch: - perm.append(hhdim_index) - new_need_dims.append(hmatch) - 
new_have_dims[hhdim_index] = hmatch - found_ndim = True - else: - found_ndim = False - # end if - else: - # Find the first dimension in have_dims that matches neddim - found_ndim = False - if nvdim_index < 0 <= hvdim_index: - skip = hvdim_index - else: - skip = -1 - # end if - hdim_indices = [x for x in range(hlen) - if (x not in perm) and (x != skip)] - for hindex in hdim_indices: - if (hindex != hvdim_index) or (nvdim_index >= 0): - hmatch = self.dim_match(neddim, have_dims[hindex]) - if hmatch: - perm.append(hindex) - new_need_dims.append(hmatch) - new_have_dims[hindex] = hmatch - found_ndim = True - break - # end if - # end if - # end if - # end for - if not found_ndim: - match = False - reason = 'Could not find dimension, ' + neddim + ', in ' - reason += '{hname}{hctx}. Needed by {nname}{nctx}' - break - # end if (no else, we are still okay) - # end for - # Find a missing vertical dimension index, if necessary - if nvdim_index < 0 <= hvdim_index: - # We need to make a substitution for the vertical - # coordinate in have_dims - vvmatch = VarDictionary.loop_var_match(have_dims[hvdim_index]) - if vvmatch: - vmatch_dims = ':'.join(vvmatch.required_stdnames) - # See if the missing vertical dimensions exist - missing_vert_dim = None - for mstdname in vvmatch.required_stdnames: - mvdim = self.find_variable(standard_name=mstdname, - any_scope=True) - if not mvdim: - missing_vert_dim = vmatch_dims - match = False # Should trigger vertical loop action - reason = 'missing vertical dimension' - break - # end if - # end for - # While we have a missing vertical dimension which has been - # created, do NOT enter the substitution into have_dims. - # The supplied variable still has a vertical dimension. - # On the other hand, we *do* need to add the new vertical - # loop index to new_need_dims. Try to put it in the correct - # place for easy calling from the existing variable. 
- # Also update perm to match the array access - if hvdim_index < len(new_need_dims): - # Insert the vertical loop dimension - if hvdim_index > 0: - before = new_need_dims[0:hvdim_index] - perm_before = perm[0:hvdim_index] - else: - before = [] - perm_before = [] - # end if - after = new_need_dims[hvdim_index:] - new_need_dims = before + [vmatch_dims] + after - perm = perm_before + [hvdim_index] + perm[hvdim_index:] - else: - new_need_dims.append(vmatch_dims) - perm.append(hvdim_index) - # end if - else: - emsg = "Unknown vertical dimension dimension, '{}'" - raise CCPPError(emsg.format(have_dims[hvdim_index])) - # end if - else: - missing_vert_dim = None - # end if - perm_test = list(range(hlen)) - # If no permutation is found, reset to None - if perm == perm_test: - perm = None - elif (not match) and (missing_vert_dim is None): - perm = None - # end if (else, return perm as is) - if new_have_dims == have_dims: - have_dims = None # Do not make any substitutions - # end if - return match, new_need_dims, new_have_dims, missing_vert_dim, perm, reason - - def find_variable(self, standard_name=None, source_var=None, - any_scope=True, clone=None, - search_call_list=False, loop_subst=False): - """Find a matching variable to , create a local clone (if - is True), or return None. - First search the SuiteObject's internal dictionary, then its - call list (unless is True, then any parent - dictionary (if is True). - can be a Var object or a standard_name string. - is not used by this version of . - """ - # First, search our local dictionary - if standard_name is None: - if source_var is None: - emsg = "One of or must be passed." 
- raise ParseInternalError(emsg) - # end if - standard_name = source_var.get_prop_value('standard_name') - elif source_var is not None: - stest = source_var.get_prop_value('standard_name') - if stest != standard_name: - emsg = (" and must match if " + - "both are passed.") - raise ParseInternalError(emsg) - # end if - # end if - scl = search_call_list - stdname = standard_name - # Don't clone yet, might find the variable further down - found_var = super(SuiteObject, - self).find_variable(standard_name=stdname, - source_var=source_var, - any_scope=False, clone=None, - search_call_list=scl, - loop_subst=loop_subst) - if (not found_var) and (self.call_list is not None) and scl: - # Don't clone yet, might find the variable further down - found_var = self.call_list.find_variable(standard_name=stdname, - source_var=source_var, - any_scope=False, - clone=None, - search_call_list=scl, - loop_subst=loop_subst) - # end if - loop_okay = VarDictionary.loop_var_okay(stdname, self.run_phase()) - if not loop_okay: - loop_subst = False - # end if - if (found_var is None) and any_scope and (self.parent is not None): - # We do not have the variable, look to parents. - found_var = self.parent.find_variable(standard_name=stdname, - source_var=source_var, - any_scope=True, - clone=clone, - search_call_list=scl, - loop_subst=loop_subst) - # end if - return found_var - - def match_variable(self, var, vstdname=None, vdims=None): - """Try to find a source for in this SuiteObject's dictionary - tree. 
Several items are returned: - found_var: True if a match was found - vert_dim: The vertical dimension in , or None - call_dims: How this variable should be called (or None if no match) - missing_vert: Vertical dim in parent but not in - perm: Permutation (XXgoldyXX: Not yet implemented) - """ - if vstdname is None: - vstdname = var.get_prop_value('standard_name') - # end if - if vdims is None: - vdims = var.get_dimensions() - # end if - if (not vdims) and self.run_phase(): - vmatch = VarDictionary.loop_var_match(vstdname) - else: - vmatch = None - # end if - found_var = False - missing_vert = None - new_vdims = list() - var_vdim = var.has_vertical_dimension(dims=vdims) - # Does this variable exist in the calling tree? - dict_var = self.find_variable(source_var=var, any_scope=True) - if dict_var is None: - # No existing variable but add loop var match to call tree - found_var = self.parent.add_variable_to_call_tree(dict_var, - vmatch=vmatch) - new_vdims = vdims - elif dict_var.source.type in _API_LOCAL_VAR_TYPES: - # We cannot change the dimensions of locally-declared variables - # Using a loop substitution is invalid because the loop variable - # value has not yet been set. - # Therefore, we have to use the declaration dimensions in the call. 
- found_var = True - new_vdims = dict_var.get_dimensions() - else: - # Check dimensions - dict_dims = dict_var.get_dimensions() - if vdims: - args = self.parent.match_dimensions(vdims, dict_dims) - match, new_vdims, new_dict_dims, missing_vert, perm, err = args - if perm is not None: - errmsg = "Permuted indices are not yet supported" - lname = var.get_prop_value('local_name') - dstr = ', '.join(vdims) - ctx = context_string(var.context) - errmsg += ", var = {}({}){}".format(lname, dstr, ctx) - raise CCPPError(errmsg) - # end if - else: - new_vdims = list() - new_dict_dims = dict_dims - match = True - # end if - # Add the variable to the parent call tree - if dict_dims == new_dict_dims: - sdict = {} - else: - sdict = {'dimensions':new_dict_dims} - # end if - found_var = self.parent.add_variable_to_call_tree(var, - subst_dict=sdict) - if not match: - found_var = False - if not missing_vert: - nctx = context_string(var.context) - nname = var.get_prop_value('local_name') - hctx = context_string(dict_var.context) - hname = dict_var.get_prop_value('local_name') - raise CCPPError(err.format(nname=nname, nctx=nctx, - hname=hname, hctx=hctx)) - # end if - # end if - # end if - # end if - return found_var, var_vdim, new_vdims, missing_vert - - def in_process_split(self): - """Find out if we are in a process-split region""" - proc_split = False - obj = self - while obj is not None: - if isinstance(obj, ProcessSplit): - proc_split = True - break - # end if - if isinstance(obj, TimeSplit): - break - # end if (other object types do not change status) - obj = obj.parent - # end while - return proc_split - - def part(self, index, error=True): - """Return one of this SuiteObject's parts raise an exception, or, - if is False, just return None""" - plen = len(self.__parts) - if (0 <= index < plen) or (abs(index) <= plen): - return self.__parts[index] - # end if - if error: - errmsg = 'No part {} in {} {}'.format(index, - self.__class__.__name__, - self.name) - raise 
ParseInternalError(errmsg) - # end if - return None - - def has_item(self, item_name): - """Return True iff item, , is already in this SuiteObject""" - has = False - for item in self.__parts: - if item.name == item_name: - has = True - else: - has = item.has_item(item_name) - # end if - if has: - break - # end if - # end for - return has - - @property - def name(self): - """Return the name of the element""" - return self.__name - - @name.setter - def name(self, value): - """Set the name of the element if it has not been set""" - if self.__name is None: - self.__name = value - else: - errmsg = 'Attempt to change name of {} to {}' - raise ParseInternalError(errmsg.format(self, value)) - # end if - - @property - def parent(self): - """This SuiteObject's parent (or none)""" - return self.__parent - - @property - def call_list(self): - """Return the SuiteObject's call_list""" - return self.__call_list - - @property - def phase_type(self): - """Return the phase_type of this suite_object""" - return self.__phase_type - - @property - def parts(self): - """Return a copy the component parts of this SuiteObject. 
- Returning a copy allows for the part list to be changed during - processing of the return value""" - return self.__parts[:] - - @property - def needs_vertical(self): - """Return the vertical dimension this SuiteObject is missing or None""" - return self.__needs_vertical - - @property - def context(self): - """Return the context of this SuiteObject""" - return self.__context - - @needs_vertical.setter - def needs_vertical(self, value): - """Reset the missing vertical dimension of this SuiteObject""" - if value is None: - self.__needs_vertical = value - elif self.__needs_vertical is not None: - if self.__needs_vertical != value: - errmsg = ('Attempt to change missing vertical dimension ' - 'from {} to {}') - raise ParseInternalError(errmsg.format(self.__needs_vertical, - value)) - # end if (no else, value is already correct) - else: - self.__needs_vertical = value - # end if - - def __repr__(self): - """Create a unique readable string for this Object""" - so_repr = super(SuiteObject, self).__repr__() - olmatch = _OBJ_LOC_RE.search(so_repr) - if olmatch is not None: - loc = ' at {}'.format(olmatch.group(1)) - else: - loc = "" - # end if - return '<{} {}{}>'.format(self.__class__.__name__, self.name, loc) - - def __format__(self, spec): - """Return a string representing the SuiteObject, including its children. - is used between subitems. - is the indent level for multi-line output. 
- """ - if spec: - sep = spec[0] - else: - sep = '\n' - # end if - try: - ind_level = int(spec[1:]) - except (ValueError, IndexError): - ind_level = 0 - # end try - if sep == '\n': - indent = " " - else: - indent = "" - # end if - if self.name == self.__class__.__name__: - # This object does not have separate name - nstr = self.name - else: - nstr = "{}: {}".format(self.__class__.__name__, self.name) - # end if - output = "{}<{}>".format(indent*ind_level, nstr) - subspec = "{}{}".format(sep, ind_level + 1) - substr = "{o}{s}{p:" + subspec + "}" - subout = "" - for part in self.parts: - subout = substr.format(o=subout, s=sep, p=part) - # end for - if subout: - output = "{}{}{}{}".format(output, subout, sep, - indent*ind_level, - self.__class__.__name__) - else: - output = "{}".format(output, self.__class__.__name__) - # end if - return output - -############################################################################### - -class Scheme(SuiteObject): - """A single scheme in a suite (e.g., init method)""" - - def __init__(self, scheme_xml, context, parent, logger): - """Initialize this physics Scheme""" - name = scheme_xml.text - self.__subroutine_name = None - self.__context = context - self.__version = scheme_xml.get('version', None) - self.__lib = scheme_xml.get('lib', None) - self.__has_vertical_dimension = False - self.__group = None - super(Scheme, self).__init__(name, context, parent, - logger, active_call_list=True) - - def update_group_call_list_variable(self, var): - """If is in our group's call list, update its intent. 
- Add to our group's call list unless: - - is in our group's call list - - is in our group's dictionary, - - is a suite variable""" - stdname = var.get_prop_value('standard_name') - my_group = self.__group - gvar = my_group.call_list.find_variable(standard_name=stdname, - any_scope=False) - if gvar: - gvar.adjust_intent(var) - else: - gvar = my_group.find_variable(standard_name=stdname, - any_scope=False) - if gvar is None: - # Check for suite variable - gvar = my_group.find_variable(standard_name=stdname, - any_scope=True) - if gvar and (not SuiteObject.is_suite_variable(gvar)): - gvar = None - # end if - if gvar is None: - my_group.add_call_list_variable(var) - # end if - # end if - - def is_local_variable(self, var): - """Return None as we never consider to be in our local - dictionary. - This is an override of the SuiteObject version""" - return None - - def analyze(self, phase, group, scheme_library, suite_vars, level, logger): - """Analyze the scheme's interface to prepare for writing""" - self.__group = group - my_header = None - if self.name in scheme_library: - func = scheme_library[self.name] - if phase in func: - my_header = func[phase] - self.__subroutine_name = my_header.title - # end if - else: - estr = 'No schemes found for {}' - raise ParseInternalError(estr.format(self.name), - context=self.__context) - # end if - if my_header is None: - estr = 'No {} header found for scheme, {}' - raise ParseInternalError(estr.format(phase, self.name), - context=self.__context) - # end if - if my_header.module is None: - estr = 'No module found for subroutine, {}' - raise ParseInternalError(estr.format(self.subroutine_name), - context=self.__context) - # end if - scheme_mods = set() - scheme_mods.add((my_header.module, self.subroutine_name)) - for var in my_header.variable_list(): - vstdname = var.get_prop_value('standard_name') - def_val = var.get_prop_value('default_value') - vdims = var.get_dimensions() - vintent = var.get_prop_value('intent') - args = 
self.match_variable(var, vstdname=vstdname, vdims=vdims) - found, vert_dim, new_dims, missing_vert = args - if found: - if not self.has_vertical_dim: - self.__has_vertical_dimension = vert_dim is not None - # end if - # We have a match, make sure var is in call list - if new_dims == vdims: - self.add_call_list_variable(var, exists_ok=True) - self.update_group_call_list_variable(var) - else: - subst_dict = {'dimensions':new_dims} - clone = var.clone(subst_dict) - self.add_call_list_variable(clone, exists_ok=True) - self.update_group_call_list_variable(clone) - # end if - else: - if missing_vert is not None: - # This Scheme needs to be in a VerticalLoop - self.needs_vertical = missing_vert - break # Deal with this and come back - # end if - if vintent == 'out': - if self.__group is None: - errmsg = 'Group not defined for {}'.format(self.name) - raise ParseInternalError(errmsg) - # end if - # The Group will manage this variable - self.__group.manage_variable(var) - self.add_call_list_variable(var) - elif def_val and (vintent != 'out'): - if self.__group is None: - errmsg = 'Group not defined for {}'.format(self.name) - raise ParseInternalError(errmsg) - # end if - # The Group will manage this variable - self.__group.manage_variable(var) - # We still need it in our call list (the group uses a clone) - self.add_call_list_variable(var) - else: - errmsg = 'Input argument for {}, {}, not found.' - if self.find_variable(source_var=var) is not None: - # The variable exists, maybe it is dim mismatch - lname = var.get_prop_value('local_name') - emsg = '\nCheck for dimension mismatch in {}' - errmsg += emsg.format(lname) - # end if - if ((not self.run_phase()) and - (vstdname in CCPP_LOOP_VAR_STDNAMES)): - emsg = '\nLoop variables not allowed in {} phase.' 
- errmsg += emsg.format(self.phase()) - # end if - raise CCPPError(errmsg.format(self.subroutine_name, - vstdname)) - # end if - # end if - # end for - if self.needs_vertical is not None: - self.parent.add_part(self, replace=True) # Should add a vloop - if isinstance(self.parent, VerticalLoop): - # Restart the loop analysis - scheme_mods = self.parent.analyze(phase, group, scheme_library, - suite_vars, level, logger) - # end if - # end if - return scheme_mods - - def write(self, outfile, logger, errflg, indent): - # Unused arguments are for consistent write interface - # pylint: disable=unused-argument - """Write code to call this Scheme to """ - # Dictionaries to try are our group, the group's call list, - # or our module - cldicts = [self.__group, self.__group.call_list] - cldicts.extend(self.__group.suite_dicts()) - my_args = self.call_list.call_string(cldicts=cldicts, - is_func_call=True, - subname=self.subroutine_name) - stmt = 'call {}({})' - outfile.write('if ({} == 0) then'.format(errflg), indent) - outfile.write(stmt.format(self.subroutine_name, my_args), indent+1) - outfile.write('end if', indent) - - def schemes(self): - """Return self as a list for consistency with subcycle""" - return [self] - - def variable_list(self, recursive=False, - std_vars=True, loop_vars=True, consts=True): - """Return a list of all variables for this Scheme. - Because Schemes do not have any variables, return a list - of this object's CallList variables instead. 
- Note that because of this, is not allowed.""" - if recursive: - raise ParseInternalError("recursive=True not allowed for Schemes") - # end if - return self.call_list.variable_list(recursive=recursive, - std_vars=std_vars, - loop_vars=loop_vars, consts=consts) - - @property - def subroutine_name(self): - """Return this scheme's actual subroutine name""" - return self.__subroutine_name - - @property - def has_vertical_dim(self): - """Return True if at least one of this Scheme's variables has - a vertical dimension (vertical_layer_dimension or - vertical_interface_dimension) - """ - return self.__has_vertical_dimension - - def __str__(self): - """Create a readable string for this Scheme""" - return ''.format(self.name, self.subroutine_name) - -############################################################################### - -class VerticalLoop(SuiteObject): - """Class to call a group of schemes or scheme collections in a - loop over a vertical dimension.""" - - def __init__(self, index_name, context, parent, logger, items=None): - """ is the standard name of the variable holding the - number of iterations (e.g., vertical_layer_dimension).""" - # self._dim_name is the standard name for the number of iterations - self._dim_name = VarDictionary.find_loop_dim_from_index(index_name) - if self._dim_name is None: - errmsg = 'No VerticalLoop dimension name for index = {}' - raise ParseInternalError(errmsg.format(index_name)) - # end if - if ':' in self._dim_name: - dims = self._dim_name.split(':') - if not dims[1]: - errmsg = 'Invalid loop dimension, {}' - raise ParseInternalError(errmsg.format(self._dim_name)) - # end if - self._dim_name = dims[1] - # end if - # self._local_dim_name is the variable name for self._dim_name - self._local_dim_name = None - super(VerticalLoop, self).__init__(index_name, context, parent, logger) - logger.debug("Adding VerticalLoop for '{}'".format(index_name)) - # Add any items - if not isinstance(items, list): - if items is None: - items = 
list() - else: - items = [items] - # end if - # end if - for item in items: - self.add_part(item) - # end for - - def analyze(self, phase, group, scheme_library, suite_vars, level, logger): - """Analyze the VerticalLoop's interface to prepare for writing""" - # Handle all the suite objects inside of this subcycle - scheme_mods = set() - # Create a variable for the loop index - newvar = Var({'local_name':self.name, 'standard_name':self.name, - 'type':'integer', 'units':'count', 'dimensions':'()'}, - _API_LOCAL) - # The Group will manage this variable - group.manage_variable(newvar) - # Find the loop-extent variable - dim_name = self._dim_name - local_dim = group.find_variable(standard_name=dim_name, any_scope=False) - if local_dim is None: - local_dim = group.call_list.find_variable(standard_name=dim_name, - any_scope=False) - # end if - if local_dim is None: - emsg = 'No variable found for vertical loop dimension {}' - raise ParseInternalError(emsg.format(self._dim_name)) - # end if - self._local_dim_name = local_dim.get_prop_value('local_name') - emsg = "VerticalLoop local name for '{}'".format(self.name) - emsg += " is '{}".format(self.dimension_name) - logger.debug(emsg) - # Analyze our internal items - for item in self.parts: - smods = item.analyze(phase, group, scheme_library, - suite_vars, level+1, logger) - for smod in smods: - scheme_mods.add(smod) - # end for - # end for - return scheme_mods - - def write(self, outfile, logger, errflg, indent): - """Write code for the vertical loop, including contents, to """ - outfile.write('do {} = 1, {}'.format(self.name, self.dimension_name), - indent) - # Note that 'scheme' may be a sybcycle or other construct - for item in self.parts: - item.write(outfile, logger, errflg, indent+1) - # end for - outfile.write('end do', 2) - - @property - def dimension_name(self): - """Return the vertical dimension over which this VerticalLoop loops""" - return self._local_dim_name - 
-############################################################################### - -class Subcycle(SuiteObject): - """Class to represent a subcycled group of schemes or scheme collections""" - - def __init__(self, sub_xml, context, parent, logger): - name = sub_xml.get('name', None) # Iteration count - loop_extent = sub_xml.get('loop', "1") # Number of iterations - # See if our loop variable is an interger or a variable - try: - loop_int = int(loop_extent) # pylint: disable=unused-variable - self._loop = loop_extent - self._loop_var_int = True - except ValueError: - self._loop_var_int = False - lvar = parent.find_variable(standard_name=self.loop, any_scope=True) - if lvar is None: - emsg = "Subcycle, {}, specifies {} iterations but {} not found" - raise CCPPError(emsg.format(name, self.loop, self.loop)) - # end if - parent.add_call_list_variable(lvar) - # end try - super(Subcycle, self).__init__(name, context, parent, logger) - for item in sub_xml: - new_item = new_suite_object(item, context, self, logger) - self.add_part(new_item) - # end for - - def analyze(self, phase, group, scheme_library, suite_vars, level, logger): - """Analyze the Subcycle's interface to prepare for writing""" - if self.name is None: - self.name = "subcycle_index{}".format(level) - # end if - # Create a variable for the loop index - self.add_variable(Var({'local_name':self.name, - 'standard_name':'loop_variable', - 'type':'integer', 'units':'count', - 'dimensions':'()'}, _API_SOURCE)) - # Handle all the suite objects inside of this subcycle - scheme_mods = set() - for item in self.parts: - smods = item.analyze(phase, group, scheme_library, - suite_vars, level+1, logger) - for smod in smods: - scheme_mods.add(smod) - # end for - # end for - return scheme_mods - - def write(self, outfile, logger, errflg, indent): - """Write code for the subcycle loop, including contents, to """ - outfile.write('do {} = 1, {}'.format(self.name, self.loop), indent) - # Note that 'scheme' may be a sybcycle or 
other construct - for item in self.parts: - item.write(outfile, logger, errflg, indent+1) - # end for - outfile.write('end do', 2) - - @property - def loop(self): - """Return the loop value or variable local_name""" - lvar = self.find_variable(standard_name=self.loop, any_scope=True) - if lvar is None: - emsg = "Subcycle, {}, specifies {} iterations but {} not found" - raise CCPPError(emsg.format(self.name, self.loop, self.loop)) - # end if - lname = lvar.get_prop_value('local_name') - return lname - -############################################################################### - -class TimeSplit(SuiteObject): - """Class to represent a group of processes to be computed in a time-split - manner -- each parameterization or other construct is called with an - state which has been updated from the previous step. - """ - - def __init__(self, sub_xml, context, parent, logger): - super(TimeSplit, self).__init__('TimeSplit', context, parent, logger) - for part in sub_xml: - new_item = new_suite_object(part, context, self, logger) - self.add_part(new_item) - # end for - - def analyze(self, phase, group, scheme_library, suite_vars, level, logger): - # Unused arguments are for consistent analyze interface - # pylint: disable=unused-argument - """Analyze the TimeSplit's interface to prepare for writing""" - # Handle all the suite objects inside of this group - scheme_mods = set() - for item in self.parts: - smods = item.analyze(phase, group, scheme_library, - suite_vars, level+1, logger) - for smod in smods: - scheme_mods.add(smod) - # end for - # end for - return scheme_mods - - def write(self, outfile, logger, errflg, indent): - """Write code for this TimeSplit section, including contents, - to """ - for item in self.parts: - item.write(outfile, logger, errflg, indent) - # end for - -############################################################################### - -class ProcessSplit(SuiteObject): - """Class to represent a group of processes to be computed in a - 
process-split manner -- all parameterizations or other constructs are - called with the same state. - NOTE: Currently a stub - """ - - def __init__(self, sub_xml, context, parent, logger): - # Unused arguments are for consistent __init__ interface - # pylint: disable=unused-argument - super(ProcessSplit, self).__init__('ProcessSplit', context, - parent, logger) - raise CCPPError('ProcessSplit not yet implemented') - - def analyze(self, phase, group, scheme_library, suite_vars, level, logger): - # Unused arguments are for consistent analyze interface - # pylint: disable=unused-argument - """Analyze the ProcessSplit's interface to prepare for writing""" - # Handle all the suite objects inside of this group - raise CCPPError('ProcessSplit not yet implemented') - - def write(self, outfile, logger, errflg, indent): - """Write code for this ProcessSplit section, including contents, - to """ - raise CCPPError('ProcessSplit not yet implemented') - -############################################################################### - -class Group(SuiteObject): - """Class to represent a grouping of schemes in a suite - A Group object is implemented as a subroutine callable by the API. - The main arguments to a group are the host model variables. - Additional output arguments are generated from schemes with intent(out) - arguments. - Additional input or inout arguments are generated for inputs needed by - schemes which are produced (intent(out)) by other groups. - """ - - __subhead = ''' - subroutine {subname}({args}) -''' - - __subend = ''' - end subroutine {subname} - -! 
======================================================================== -''' - - __thread_check = CodeBlock([('#ifdef _OPENMP', -1), - ('if (omp_get_thread_num() > 1) then', 1), - ('{errflg} = 1', 2), - (('{errmsg} = "Cannot call {phase} routine ' - 'from a threaded region"'), 2), - ('return', 2), - ('end if', 1), - ('#endif', -1)]) - - __process_types = [_API_TIMESPLIT_TAG, _API_PROCESSSPLIT_TAG] - - __process_xml = {} - for gptype in __process_types: - __process_xml[gptype] = '<{ptype}>'.format(ptype=gptype) - # end for - - def __init__(self, group_xml, transition, parent, context, logger): - """Initialize this Group object from . - is the group's phase, is the group's suite. - """ - name = parent.name + '_' + group_xml.get('name') - if transition not in CCPP_STATE_MACH.transitions(): - errmsg = "Bad transition argument to Group, '{}'" - raise ParseInternalError(errmsg.format(transition)) - # end if - # Initialize the dictionary of variables internal to group - super(Group, self).__init__(name, context, parent, - logger, active_call_list=True, - phase_type=transition) - # Add the items but first make sure we know the process type for - # the group (e.g., TimeSplit or ProcessSplit). 
- if (transition == RUN_PHASE_NAME) and ((not group_xml) or - (group_xml[0].tag not in - Group.__process_types)): - # Default is TimeSplit - tsxml = ET.fromstring(Group.__process_xml[_API_TIMESPLIT_TAG]) - time_split = new_suite_object(tsxml, context, self, logger) - add_to = time_split - self.add_part(time_split) - else: - add_to = self - # end if - # Add the sub objects either directly to the Group or to the TimeSplit - for item in group_xml: - new_item = new_suite_object(item, context, add_to, logger) - add_to.add_part(new_item) - # end for - self._local_schemes = set() - self._host_vars = None - self._host_ddts = None - self._loop_var_matches = list() - self._phase_check_stmts = list() - self._set_state = None - self._ddt_library = None - - def phase_match(self, scheme_name): - """If scheme_name matches the group phase, return the group and - function ID. Otherwise, return None - """ - fid, tid, _ = CCPP_STATE_MACH.transition_match(scheme_name, - transition=self.phase()) - if tid is not None: - return self, fid - # end if - return None, None - - def move_to_call_list(self, standard_name): - """Move a variable from the group internal dictionary to the call list. - This is done when the variable, , will be allocated by - the suite. - """ - gvar = self.find_variable(standard_name=standard_name, any_scope=False) - if gvar is None: - errmsg = "Group {}, cannot move {}, variable not found" - raise ParseInternalError(errmsg.format(self.name, standard_name)) - # end if - self.add_call_list_variable(gvar, exists_ok=True) - self.remove_variable(standard_name) - - def register_action(self, vaction): - """Register any recognized type for use during self.write. - Return True iff is handled. 
- """ - if isinstance(vaction, VarLoopSubst): - self._loop_var_matches = vaction.add_to_list(self._loop_var_matches) - # Add the missing dim - vaction.add_local(self, _API_LOCAL) - return True - # end if - return False - - def manage_variable(self, newvar): - """Add to our local dictionary making necessary - modifications to the variable properties so that it is - allocated appropriately""" - # Need new prop dict to eliminate unwanted properties (e.g., intent) - vdims = newvar.get_dimensions() - # Look for dimensions where we have a loop substitution and replace - # with the correct size - if self.run_phase(): - hdims = [x.missing_stdname for x in self._loop_var_matches] - else: - # Do not do loop substitutions in full phases - hdims = list() - # end if - for index, dim in enumerate(vdims): - newdim = None - for subdim in dim.split(':'): - if subdim in hdims: - # We have a loop substitution, find and replace - hindex = hdims.index(subdim) - names = self._loop_var_matches[hindex].required_stdnames - newdim = ':'.join(names) - break - # end if - if ('vertical' in subdim) and ('index' in subdim): - # We have a vertical index, replace with correct dimension - errmsg = "vertical index replace not implemented" - raise ParseInternalError(errmsg) - # end if - # end for - if newdim is not None: - vdims[index] = newdim - # end if - # end for - if self.timestep_phase(): - persist = 'timestep' - else: - persist = 'run' - # end if - # Start with an official copy of 's prop_dict with - # corrected dimensions - subst_dict = {'dimensions':vdims} - prop_dict = newvar.copy_prop_dict(subst_dict=subst_dict) - # Add the allocatable items - prop_dict['allocatable'] = len(vdims) > 0 # No need to allocate scalar - prop_dict['persistence'] = persist - # This is a local variable - if 'intent' in prop_dict: - del prop_dict['intent'] - # end if - # Create a new variable, save the original context - local_var = Var(prop_dict, ParseSource(_API_SOURCE_NAME, - _API_LOCAL_VAR_NAME, - 
newvar.context)) - self.add_variable(local_var, exists_ok=True) - # Finally, make sure all dimensions are accounted for - emsg = self.add_variable_dimensions(local_var, _API_LOCAL_VAR_TYPES, - adjust_intent=True, - to_dict=self.call_list) - if emsg: - raise CCPPError(emsg) - # end if - - def analyze(self, phase, suite_vars, scheme_library, ddt_library, logger): - """Analyze the Group's interface to prepare for writing""" - self._ddt_library = ddt_library - # Sanity check for Group - if phase != self.phase(): - errmsg = 'Group {} has phase {} but analyze is phase {}' - raise ParseInternalError(errmsg.format(self.name, - self.phase(), phase)) - # end if - for item in self.parts: - # Items can be schemes, subcycles or other objects - # All have the same interface and return a set of module use - # statements (lschemes) - lschemes = item.analyze(phase, self, scheme_library, - suite_vars, 1, logger) - for lscheme in lschemes: - self._local_schemes.add(lscheme) - # end for - # end for - self._phase_check_stmts = Suite.check_suite_state(phase) - self._set_state = Suite.set_suite_state(phase) - logger.debug("{}".format(self)) - - def allocate_dim_str(self, dims, context): - """Create the dimension string for an allocate statement""" - rdims = list() - for dim in dims: - rdparts = list() - dparts = dim.split(':') - for dpart in dparts: - dvar = self.find_variable(standard_name=dpart, any_scope=False) - if dvar is None: - dvar = self.call_list.find_variable(standard_name=dpart, - any_scope=False) - if dvar is None: - emsg = "Dimension variable, '{}', not found{}" - lvar = self.find_local_name(dpart, any_scope=True) - if lvar is not None: - emsg += "\nBe sure to use standard names!" 
- # end if - ctx = context_string(context) - raise CCPPError(emsg.format(dpart, ctx)) - # end if - lname = dvar.get_prop_value('local_name') - rdparts.append(lname) - # end for - rdims.append(':'.join(rdparts)) - # end for - return ', '.join(rdims) - - def find_variable(self, standard_name=None, source_var=None, - any_scope=True, clone=None, - search_call_list=False, loop_subst=False): - """Find a matching variable to , create a local clone (if - is True), or return None. - This purpose of this special Group version is to record any constituent - variable found for processing during the write phase. - """ - fvar = super(Group, - self).find_variable(standard_name=standard_name, - source_var=source_var, - any_scope=any_scope, clone=clone, - search_call_list=search_call_list, - loop_subst=loop_subst) - if fvar and fvar.is_constituent(): - if fvar.source.type == ConstituentVarDict.constitutent_source_type(): - # We found this variable in the constituent dictionary, - # add it to our call list - self.add_call_list_variable(fvar, exists_ok=True) - # end if - # end if - return fvar - - def write(self, outfile, logger, host_arglist, indent, const_mod, - suite_vars=None, allocate=False, deallocate=False): - """Write code for this subroutine (Group), including contents, - to """ - # Unused arguments are for consistent write interface - # pylint: disable=unused-argument - # group type for (de)allocation - if self.timestep_phase(): - group_type = 'timestep' # Just allocate for the timestep - else: - group_type = 'run' # Allocate for entire run - # end if - # Collect information on local variables - subpart_vars = {} - allocatable_var_set = set() - for item in [self]:# + self.parts: - for var in item.declarations(): - lname = var.get_prop_value('local_name') - if lname in subpart_vars: - if subpart_vars[lname][0].compatible(var): - pass # We already are going to declare this variable - else: - errmsg = "Duplicate Group variable, {}" - raise 
ParseInternalError(errmsg.format(lname)) - # end if - else: - subpart_vars[lname] = (var, item) - dims = var.get_dimensions() - if (dims is not None) and dims: - allocatable_var_set.add(lname) - # end if - # end if - # end for - # end for - # First, write out the subroutine header - subname = self.name - call_list = self.call_list.call_string() - outfile.write(Group.__subhead.format(subname=subname, args=call_list), - indent) - # Write out any use statements - if self._local_schemes: - modmax = max([len(s[0]) for s in self._local_schemes]) - else: - modmax = 0 - # end if - # Write out the scheme use statements - scheme_use = 'use {},{} only: {}' - for scheme in self._local_schemes: - smod = scheme[0] - sname = scheme[1] - slen = ' '*(modmax - len(smod)) - outfile.write(scheme_use.format(smod, slen, sname), indent+1) - # end for - # Look for any DDT types - call_vars = self.call_list.variable_list() - self._ddt_library.write_ddt_use_statements(call_vars, outfile, - indent+1, pad=modmax) - decl_vars = [x[0] for x in subpart_vars.values()] - self._ddt_library.write_ddt_use_statements(decl_vars, outfile, - indent+1, pad=modmax) - outfile.write('', 0) - # Write out dummy arguments - outfile.write('! Dummy arguments', indent+1) - msg = 'Variables for {}: ({})' - logger.debug(msg.format(self.name, call_vars)) - self.call_list.declare_variables(outfile, indent+1, dummy=True) - if subpart_vars: - outfile.write('\n! 
Local Variables', indent+1) - # Write out local variables - for key in subpart_vars: - var = subpart_vars[key][0] - spdict = subpart_vars[key][1] - var.write_def(outfile, indent+1, spdict, - allocatable=(key in allocatable_var_set)) - # end for - outfile.write('', 0) - # Get error variable names - verrflg = self.find_variable(standard_name='ccpp_error_flag', - any_scope=True) - if verrflg is not None: - errflg = verrflg.get_prop_value('local_name') - else: - errmsg = "No ccpp_error_flag variable for group, {}" - raise CCPPError(errmsg.format(self.name)) - # end if - verrmsg = self.find_variable(standard_name='ccpp_error_message', - any_scope=True) - if verrmsg is not None: - errmsg = verrmsg.get_prop_value('local_name') - else: - errmsg = "No ccpp_error_message variable for group, {}" - raise CCPPError(errmsg.format(self.name)) - # end if - # Initialize error variables - outfile.write("{} = 0".format(errflg), 2) - outfile.write("{} = ''".format(errmsg), 2) - # Output threaded region check (except for run phase) - if not self.run_phase(): - Group.__thread_check.write(outfile, indent, - {'phase' : self.phase(), - 'errflg' : errflg, 'errmsg' : errmsg}) - # Check state machine - self._phase_check_stmts.write(outfile, indent, - {'errflg' : errflg, 'errmsg' : errmsg, - 'funcname' : self.name}) - # Allocate local arrays - alloc_stmt = "allocate({}({}))" - for lname in allocatable_var_set: - var = subpart_vars[lname][0] - dims = var.get_dimensions() - alloc_str = self.allocate_dim_str(dims, var.context) - outfile.write(alloc_stmt.format(lname, alloc_str), indent+1) - # end for - # Allocate suite vars - if allocate: - for svar in suite_vars.variable_list(): - dims = svar.get_dimensions() - if dims: - timestep_var = svar.get_prop_value('persistence') - if group_type == timestep_var: - alloc_str = self.allocate_dim_str(dims, svar.context) - lname = svar.get_prop_value('local_name') - outfile.write(alloc_stmt.format(lname, alloc_str), - indent+1) - # end if (do not allocate in 
this phase) - # end if dims (do not allocate scalars) - # end for - # end if - # Write any loop match calculations - for vmatch in self._loop_var_matches: - action = vmatch.write_action(self, dict2=self.call_list) - if action: - outfile.write(action, indent+1) - # end if - # end for - # Write the scheme and subcycle calls - for item in self.parts: - item.write(outfile, logger, errflg, indent + 1) - # end for - # Deallocate local arrays - for lname in allocatable_var_set: - outfile.write('deallocate({})'.format(lname), indent+1) - # end for - # Deallocate suite vars - if deallocate: - for svar in suite_vars.variable_list(): - dims = svar.get_dimensions() - if dims: - timestep_var = svar.get_prop_value('persistence') - if group_type == timestep_var: - lname = svar.get_prop_value('local_name') - outfile.write('deallocate({})'.format(lname), indent+1) - # end if - # end if (no else, do not deallocate scalars) - # end for - # end if - self._set_state.write(outfile, indent, {}) - # end if - outfile.write(Group.__subend.format(subname=subname), indent) - - @property - def suite(self): - """Return this Group's suite""" - return self.parent - - def suite_dicts(self): - """Return a list of this Group's Suite's dictionaries""" - return self.suite.suite_dicts() - ############################################################################### class Suite(VarDictionary): @@ -1908,51 +79,49 @@ class Suite(VarDictionary): __scheme_template = '{}' - def __init__(self, filename, api, logger): + def __init__(self, filename, api, run_env): """Initialize this Suite object from the SDF, . 
serves as the Suite's parent.""" - self.__logger = logger - self._name = None - self._sdf_name = filename - self._groups = list() - self._suite_init_group = None - self._suite_final_group = None - self._timestep_init_group = None - self._timestep_final_group = None + self.__run_env = run_env + self.__name = None + self.__sdf_name = filename + self.__groups = list() + self.__suite_init_group = None + self.__suite_final_group = None + self.__timestep_init_group = None + self.__timestep_final_group = None self.__context = None - self._host_arg_list_full = None - self._host_arg_list_noloop = None - self._module = None - self._ddt_library = None + self.__host_arg_list_full = None + self.__host_arg_list_noloop = None + self.__module = None + self.__ddt_library = None # Full phases/groups are special groups where the entire state is passed - self._full_groups = {} + self.__full_groups = {} self._full_phases = {} - self._gvar_stdnames = {} # Standard names of group-created vars + self.__gvar_stdnames = {} # Standard names of group-created vars # Initialize our dictionary # Create a 'parent' to hold the constituent variables # The parent for the constituent dictionary is the API. temp_name = os.path.splitext(os.path.basename(filename))[0] const_dict = ConstituentVarDict(temp_name+'_constituents', - parent_dict=api, - logger=logger) - super(Suite, self).__init__(self.sdf_name, parent_dict=const_dict, - logger=logger) - if not os.path.exists(self._sdf_name): + api, run_env) + super().__init__(self.sdf_name, run_env, parent_dict=const_dict) + if not os.path.exists(self.__sdf_name): emsg = "Suite definition file {0} not found." 
- raise CCPPError(emsg.format(self._sdf_name)) + raise CCPPError(emsg.format(self.__sdf_name)) # end if # Parse the SDF - self.parse() + self.parse(run_env) @property def name(self): """Get the name of the suite.""" - return self._name + return self.__name @property def sdf_name(self): """Get the name of the suite definition file.""" - return self._sdf_name + return self.__sdf_name @classmethod def check_suite_state(cls, stage): @@ -1964,7 +133,7 @@ def check_suite_state(cls, stage): css = "trim({})".format(Suite.__state_machine_var_name) prev_str = "({} /= '{}')".format(css, prev_state) check_stmts.append(("if {} then".format(prev_str), 1)) - check_stmts.append(("{errflg} = 1", 2)) + check_stmts.append(("{errcode} = 1", 2)) errmsg_str = "write({errmsg}, '(3a)') " errmsg_str += "\"Invalid initial CCPP state, '\", " + css + ', ' errmsg_str += "\"' in {funcname}\"" @@ -1990,70 +159,74 @@ def set_suite_state(cls, phase): # end if return CodeBlock([(stmt, 1)]) - def new_group(self, group_string, transition): + def new_group(self, group_string, transition, run_env): """Create a new Group object from the a XML description""" if isinstance(group_string, str): gxml = ET.fromstring(group_string) else: gxml = group_string # end if - group = Group(gxml, transition, self, self.__context, self.__logger) + group = Group(gxml, transition, self, self.__context, run_env) for svar in CCPP_REQUIRED_VARS: group.add_call_list_variable(svar) # end for if transition != RUN_PHASE_NAME: - self._full_groups[group.name] = group + self.__full_groups[group.name] = group self._full_phases[group.phase()] = group # end if return group - def new_group_from_name(self, group_name): + def new_group_from_name(self, group_name, run_env): '''Create an XML string for Group, , and use it to create the corresponding group. 
Note: must be the a transition string''' group_xml = ''.format(group_name) - return self.new_group(group_xml, group_name) + return self.new_group(group_xml, group_name, run_env) - def parse(self): + def parse(self, run_env): """Parse the suite definition file.""" success = True - _, suite_xml = read_xml_file(self._sdf_name, self.__logger) + _, suite_xml = read_xml_file(self.__sdf_name, run_env.logger) # We do not have line number information for the XML file - self.__context = ParseContext(filename=self._sdf_name) + self.__context = ParseContext(filename=self.__sdf_name) # Validate the XML file version = find_schema_version(suite_xml) - res = validate_xml_file(self._sdf_name, 'suite', version, self.__logger) + res = validate_xml_file(self.__sdf_name, 'suite', version, + run_env.logger) if not res: emsg = "Invalid suite definition file, '{}'" - raise CCPPError(emsg.format(self._sdf_name)) + raise CCPPError(emsg.format(self.__sdf_name)) # end if - self._name = suite_xml.get('name') - self._module = 'ccpp_{}_cap'.format(self.name) + self.__name = suite_xml.get('name') + self.__module = 'ccpp_{}_cap'.format(self.name) lmsg = "Reading suite definition file for '{}'" - self.__logger.info(lmsg.format(self.name)) + if run_env.logger and run_env.logger.isEnabledFor(logging.INFO): + run_env.logger.info(lmsg.format(self.name)) + # end if gname = Suite.__initial_group_name - self._suite_init_group = self.new_group_from_name(gname) + self.__suite_init_group = self.new_group_from_name(gname, run_env) gname = Suite.__final_group_name - self._suite_final_group = self.new_group_from_name(gname) + self.__suite_final_group = self.new_group_from_name(gname, run_env) gname = Suite.__timestep_initial_group_name - self._timestep_init_group = self.new_group_from_name(gname) + self.__timestep_init_group = self.new_group_from_name(gname, run_env) gname = Suite.__timestep_final_group_name - self._timestep_final_group = self.new_group_from_name(gname) + self.__timestep_final_group = 
self.new_group_from_name(gname, run_env) # Set up some groupings for later efficiency - self._beg_groups = [self._suite_init_group.name, - self._timestep_init_group.name] - self._end_groups = [self._suite_final_group.name, - self._timestep_final_group.name] + self._beg_groups = [self.__suite_init_group.name, + self.__timestep_init_group.name] + self._end_groups = [self.__suite_final_group.name, + self.__timestep_final_group.name] # Build hierarchical structure as in SDF - self._groups.append(self._suite_init_group) - self._groups.append(self._timestep_init_group) + self.__groups.append(self.__suite_init_group) + self.__groups.append(self.__timestep_init_group) for suite_item in suite_xml: item_type = suite_item.tag.lower() # Suite item is a group or a suite-wide init or final method if item_type == 'group': # Parse a group - self._groups.append(self.new_group(suite_item, RUN_PHASE_NAME)) + self.__groups.append(self.new_group(suite_item, RUN_PHASE_NAME, + run_env)) else: match_trans = CCPP_STATE_MACH.function_match(item_type) if match_trans is None: @@ -2063,15 +236,15 @@ def parse(self): if match_trans in self._full_phases: # Parse a suite-wide initialization scheme scheme = Scheme(suite_item, self.__context, - self, self.__logger) + self, run_env) self._full_phases[match_trans].add_item(scheme) else: emsg = "Unhandled CCPP suite component tag type, '{}'" raise ParseInternalError(emsg.format(match_trans)) # end if # end for - self._groups.append(self._timestep_final_group) - self._groups.append(self._suite_final_group) + self.__groups.append(self.__timestep_final_group) + self.__groups.append(self.__suite_final_group) return success def suite_dicts(self): @@ -2082,12 +255,12 @@ def suite_dicts(self): @property def module(self): """Get the list of the module generated for this suite.""" - return self._module + return self.__module @property def groups(self): """Get the list of groups in this suite.""" - return self._groups + return self.__groups def 
find_variable(self, standard_name=None, source_var=None, any_scope=True, clone=None, @@ -2106,16 +279,16 @@ def find_variable(self, standard_name=None, source_var=None, """ # First, see if the variable is already in our path srch_clist = search_call_list - var = super(Suite, self).find_variable(standard_name=standard_name, - source_var=source_var, - any_scope=any_scope, - clone=None, - search_call_list=srch_clist, - loop_subst=loop_subst) + var = super().find_variable(standard_name=standard_name, + source_var=source_var, + any_scope=any_scope, + clone=None, + search_call_list=srch_clist, + loop_subst=loop_subst) if var is None: # No dice? Check for a group variable which can be promoted - if standard_name in self._gvar_stdnames: - group = self._gvar_stdnames[standard_name] + if standard_name in self.__gvar_stdnames: + group = self.__gvar_stdnames[standard_name] var = group.find_variable(standard_name=standard_name, source_var=source_var, any_scope=False, @@ -2124,12 +297,12 @@ def find_variable(self, standard_name=None, source_var=None, if var is not None: # Promote variable to suite level # Remove this entry to avoid looping back here - del self._gvar_stdnames[standard_name] + del self.__gvar_stdnames[standard_name] # Let everyone know this is now a Suite variable var.source = ParseSource(_API_SOURCE_NAME, _API_SUITE_VAR_NAME, var.context) - self.add_variable(var) + self.add_variable(var, self.__run_env) # Remove the variable from the group group.remove_variable(standard_name) else: @@ -2141,14 +314,13 @@ def find_variable(self, standard_name=None, source_var=None, # end if if (var is None) and (clone is not None): # Guess it is time to clone a different variable - var = super(Suite, self).find_variable(standard_name=standard_name, - source_var=source_var, - any_scope=any_scope, - clone=clone) + var = super().find_variable(standard_name=standard_name, + source_var=source_var, + any_scope=any_scope, clone=clone) # end if return var - def analyze(self, host_model, 
scheme_library, ddt_library, logger): + def analyze(self, host_model, scheme_library, ddt_library, run_env): """Collect all information needed to write a suite file >>> CCPP_STATE_MACH.transition_match('init') 'initialize' @@ -2215,7 +387,7 @@ def analyze(self, host_model, scheme_library, ddt_library, logger): >>> CCPP_STATE_MACH.function_match('foo_timestep_finalize') ('foo', 'timestep_finalize', 'timestep_final') """ - self._ddt_library = ddt_library + self.__ddt_library = ddt_library # Collect all relevant schemes # For all groups, find associated init and final methods scheme_set = set() @@ -2239,27 +411,30 @@ def analyze(self, host_model, scheme_library, ddt_library, logger): if not pgroup.has_item(header.title): sstr = Suite.__scheme_template.format(module) sxml = ET.fromstring(sstr) - scheme = Scheme(sxml, self.__context, pgroup, - self.__logger) + scheme = Scheme(sxml, self.__context, pgroup, run_env) pgroup.add_part(scheme) # end if (no else, scheme is already in group) # end if (no else, phase not in scheme set) # end for # end for # Grab the host model argument list - self._host_arg_list_full = host_model.argument_list() - self._host_arg_list_noloop = host_model.argument_list(loop_vars=False) + self.__host_arg_list_full = host_model.argument_list() + self.__host_arg_list_noloop = host_model.argument_list(loop_vars=False) # First pass, create init, run, and finalize sequences for item in self.groups: - if item.name in self._full_groups: - phase = self._full_groups[item.name].phase() + if item.name in self.__full_groups: + phase = self.__full_groups[item.name].phase() else: phase = RUN_PHASE_NAME # end if lmsg = "Group {}, schemes = {}" - self.__logger.debug(lmsg.format(item.name, - [x.name for x in item.schemes()])) - item.analyze(phase, self, scheme_library, ddt_library, logger) + if run_env.logger and run_env.logger.isEnabledFor(logging.DEBUG): + run_env.logger.debug(lmsg.format(item.name, + [x.name + for x in item.schemes()])) + item.analyze(phase, 
self, scheme_library, ddt_library, + self.check_suite_state(phase), + self.set_suite_state(phase)) # Look for group variables that need to be promoted to the suite # We need to promote any variable used later to the suite, however, # we do not yet know if it will be used. @@ -2267,8 +442,8 @@ def analyze(self, host_model, scheme_library, ddt_library, logger): gvars = item.variable_list() for gvar in gvars: stdname = gvar.get_prop_value('standard_name') - if not stdname in self._gvar_stdnames: - self._gvar_stdnames[stdname] = item + if not stdname in self.__gvar_stdnames: + self.__gvar_stdnames[stdname] = item # end if # end for # end for @@ -2311,12 +486,14 @@ def constituent_dictionary(self): """Return the constituent dictionary for this suite""" return self.parent - def write(self, output_dir, logger): + def write(self, output_dir, run_env): """Create caps for all groups in the suite and for the entire suite (calling the group caps one after another)""" # Set name of module and filename of cap filename = '{module_name}.F90'.format(module_name=self.module) - logger.debug('Writing CCPP suite file, {}'.format(filename)) + if run_env.logger and run_env.logger.isEnabledFor(logging.DEBUG): + run_env.logger.debug('Writing CCPP suite file, {}'.format(filename)) + # end if # Retrieve the name of the constituent module for Group use statements const_mod = self.parent.constituent_module_name() # Init @@ -2327,8 +504,8 @@ def write(self, output_dir, logger): # Write module 'use' statements here outfile.write('use {}'.format(KINDS_MODULE), 1) # Look for any DDT types - self._ddt_library.write_ddt_use_statements(self.values(), - outfile, 1) + self.__ddt_library.write_ddt_use_statements(self.values(), + outfile, 1) # Write out constituent module use statement(s) const_dict = self.constituent_dictionary() const_dict.write_suite_use(outfile, 1) @@ -2339,7 +516,7 @@ def write(self, output_dir, logger): var_state = Suite.__state_machine_initial_state 
outfile.write(line.format(css_var_name=var_name, state=var_state), 1) - for group in self._groups: + for group in self.__groups: outfile.write('public :: {}'.format(group.name), 1) # end for # Declare constituent public interfaces @@ -2351,15 +528,15 @@ def write(self, output_dir, logger): self[svar].write_def(outfile, 1, self, allocatable=True) # end for outfile.end_module_header() - for group in self._groups: + for group in self.__groups: if group.name in self._beg_groups: - group.write(outfile, logger, self._host_arg_list_noloop, 1, - const_mod, suite_vars=self, allocate=True) + group.write(outfile, self.__host_arg_list_noloop, + 1, const_mod, suite_vars=self, allocate=True) elif group.name in self._end_groups: - group.write(outfile, logger, self._host_arg_list_noloop, 1, - const_mod, suite_vars=self, deallocate=True) + group.write(outfile, self.__host_arg_list_noloop, + 1, const_mod, suite_vars=self, deallocate=True) else: - group.write(outfile, logger, self._host_arg_list_full, 1, + group.write(outfile, self.__host_arg_list_full, 1, const_mod) # end if # end for @@ -2380,6 +557,7 @@ class API(VarDictionary): __suite_fname = 'ccpp_physics_suite_list' __part_fname = 'ccpp_physics_suite_part_list' __vars_fname = 'ccpp_physics_suite_variables' + __schemes_fname = 'ccpp_physics_suite_schemes' __file_desc = "API for {host_model} calls to CCPP suites" @@ -2399,37 +577,45 @@ class API(VarDictionary): 'standard_name':'suite_name', 'intent':'in', 'type':'character', 'kind':'len=*', 'units':'', - 'dimensions':'()'}, _API_SOURCE) + 'dimensions':'()'}, _API_SOURCE, _API_DUMMY_RUN_ENV) __suite_part = Var({'local_name':'suite_part', 'standard_name':'suite_part', 'intent':'in', 'type':'character', 'kind':'len=*', 'units':'', - 'dimensions':'()'}, _API_SOURCE) - - def __init__(self, sdfs, host_model, scheme_headers, logger): - """Initialize this API""" + 'dimensions':'()'}, _API_SOURCE, _API_DUMMY_RUN_ENV) + + def __init__(self, sdfs, host_model, scheme_headers, run_env): + 
"""Initialize this API. + is the list of Suite Definition Files to be parsed for + data needed by the CCPP cap. + is a HostModel object to reference for host model + variables. + is the list of parsed physics scheme metadata files. + Every scheme referenced by an SDF in MUST be in this list, + however, unused schemes are allowed. + is the CCPPFrameworkEnv object for this framework run. + """ self.__module = 'ccpp_physics_api' self.__host = host_model self.__suites = list() - super(API, self).__init__(self.module, parent_dict=self.host_model, - logger=logger) + super().__init__(self.module, run_env, parent_dict=self.host_model) # Create a usable library out of scheme_headers # Structure is dictionary of dictionaries # Top-level dictionary is keyed by function name # Secondary level is by phase scheme_library = {} # First, process DDT headers - self._ddt_lib = DDTLibrary('{}_api'.format(self.host_model.name), - ddts=[d for d in scheme_headers - if d.header_type == 'ddt'], - logger=logger) + self.__ddt_lib = DDTLibrary('{}_api'.format(self.host_model.name), + run_env, ddts=[d for d in scheme_headers + if d.header_type == 'ddt']) for header in [d for d in scheme_headers if d.header_type != 'ddt']: if header.header_type != 'scheme': errmsg = "{} is an unknown CCPP API metadata header type, {}" raise CCPPError(errmsg.format(header.title, header.header_type)) # end if - func_id, _, match_trans = CCPP_STATE_MACH.function_match(header.title) + func_id, _, match_trans = \ + CCPP_STATE_MACH.function_match(header.title) if func_id not in scheme_library: scheme_library[func_id] = {} # end if @@ -2443,11 +629,12 @@ def __init__(self, sdfs, host_model, scheme_headers, logger): # end for # Turn the SDF files into Suites for sdf in sdfs: - suite = Suite(sdf, self, logger) - suite.analyze(self.host_model, scheme_library, self._ddt_lib, logger) + suite = Suite(sdf, self, run_env) + suite.analyze(self.host_model, scheme_library, + self.__ddt_lib, run_env) self.__suites.append(suite) # 
end for - # We will need the correct names for errmsg and errflg + # We will need the correct names for errmsg and errcode evar = self.host_model.find_variable(standard_name='ccpp_error_message') subst_dict = {'intent':'out'} if evar is not None: @@ -2455,24 +642,26 @@ def __init__(self, sdfs, host_model, scheme_headers, logger): else: raise CCPPError('Required variable, ccpp_error_message, not found') # end if - evar = self.host_model.find_variable(standard_name='ccpp_error_flag') + evar = self.host_model.find_variable(standard_name='ccpp_error_code') if evar is not None: - self._errflg_var = evar.clone(subst_dict) + self._errcode_var = evar.clone(subst_dict) else: - raise CCPPError('Required variable, ccpp_error_flag, not found') + raise CCPPError('Required variable, ccpp_error_code, not found') # end if # We need a call list for every phase self.__call_lists = {} for phase in CCPP_STATE_MACH.transitions(): - self.__call_lists[phase] = CallList('API_' + phase, logger=logger) - self.__call_lists[phase].add_variable(self.suite_name_var) + self.__call_lists[phase] = CallList('API_' + phase, run_env) + self.__call_lists[phase].add_variable(self.suite_name_var, run_env) if phase == RUN_PHASE_NAME: - self.__call_lists[phase].add_variable(self.suite_part_var) + self.__call_lists[phase].add_variable(self.suite_part_var, + run_env) # end if for suite in self.__suites: for group in suite.groups: if group.phase() == phase: self.__call_lists[phase].add_vars(group.call_list, + run_env, gen_unique=True) # end if # end for @@ -2491,7 +680,7 @@ def call_list(self, phase): # end if raise ParseInternalError("Illegal phase, '{}'".format(phase)) - def write(self, output_dir, logger): + def write(self, output_dir, run_env): """Write CCPP API module""" if not self.suites: raise CCPPError("No suite specified for generating API") @@ -2499,7 +688,7 @@ def write(self, output_dir, logger): api_filenames = list() # Write out the suite files for suite in self.suites: - out_file_name = 
suite.write(output_dir, logger) + out_file_name = suite.write(output_dir, run_env) api_filenames.append(out_file_name) # end for return api_filenames @@ -2510,12 +699,13 @@ def declare_inspection_interfaces(cls, ofile): ofile.write("public :: {}".format(API.__suite_fname), 1) ofile.write("public :: {}".format(API.__part_fname), 1) ofile.write("public :: {}".format(API.__vars_fname), 1) + ofile.write("public :: {}".format(API.__schemes_fname), 1) def get_errinfo_names(self): """Return a tuple of error output local names""" errmsg_name = self._errmsg_var.get_prop_value('local_name') - errflg_name = self._errflg_var.get_prop_value('local_name') - return (errmsg_name, errflg_name) + errcode_name = self._errcode_var.get_prop_value('local_name') + return (errmsg_name, errcode_name) @staticmethod def write_var_set_loop(ofile, varlist_name, var_list, indent, @@ -2538,30 +728,19 @@ def write_var_set_loop(ofile, varlist_name, var_list, indent, indent) # end for - def write_inspection_routines(self, ofile): - """Write the list_suites and list_suite_parts subroutines""" - errmsg_name, errflg_name = self.get_errinfo_names() - ofile.write("subroutine {}(suites)".format(API.__suite_fname), 1) - nsuites = len(self.suites) - oline = "character(len=*), allocatable, intent(out) :: suites(:)" - ofile.write(oline, 2) - ofile.write("\nallocate(suites({}))".format(nsuites), 2) - for ind, suite in enumerate(self.suites): - ofile.write("suites({}) = '{}'".format(ind+1, suite.name), 2) - # end for - ofile.write("end subroutine {}".format(API.__suite_fname), 1) - # Write out the suite part list subroutine - oline = "suite_name, part_list, {errmsg}, {errflg}" - inargs = oline.format(errmsg=errmsg_name, errflg=errflg_name) + def write_suite_part_list_sub(self, ofile, errmsg_name, errcode_name): + """Write the suite-part list subroutine""" + oline = "suite_name, part_list, {errmsg}, {errcode}" + inargs = oline.format(errmsg=errmsg_name, errcode=errcode_name) ofile.write("\nsubroutine 
{}({})".format(API.__part_fname, inargs), 1) oline = "character(len=*), intent(in) :: suite_name" ofile.write(oline, 2) oline = "character(len=*), allocatable, intent(out) :: part_list(:)" ofile.write(oline, 2) self._errmsg_var.write_def(ofile, 2, self) - self._errflg_var.write_def(ofile, 2, self) + self._errcode_var.write_def(ofile, 2, self) else_str = '' - ename = self._errflg_var.get_prop_value('local_name') + ename = self._errcode_var.get_prop_value('local_name') ofile.write("{} = 0".format(ename), 2) ename = self._errmsg_var.get_prop_value('local_name') ofile.write("{} = ''".format(ename), 2) @@ -2575,13 +754,15 @@ def write_inspection_routines(self, ofile): emsg = "write({errmsg}, '(3a)')".format(errmsg=errmsg_name) emsg += "'No suite named ', trim(suite_name), ' found'" ofile.write(emsg, 3) - ofile.write("{errflg} = 1".format(errflg=errflg_name), 3) + ofile.write("{errcode} = 1".format(errcode=errcode_name), 3) ofile.write("end if", 2) ofile.write("end subroutine {}".format(API.__part_fname), 1) - # Write out the suite required variable subroutine - oline = "suite_name, variable_list, {errmsg}, {errflg}" + + def write_req_vars_sub(self, ofile, errmsg_name, errcode_name): + """Write the required variables subroutine""" + oline = "suite_name, variable_list, {errmsg}, {errcode}" oline += ", input_vars, output_vars, struct_elements" - inargs = oline.format(errmsg=errmsg_name, errflg=errflg_name) + inargs = oline.format(errmsg=errmsg_name, errcode=errcode_name) ofile.write("\nsubroutine {}({})".format(API.__vars_fname, inargs), 1) ofile.write("! 
Dummy arguments", 2) oline = "character(len=*), intent(in) :: suite_name" @@ -2589,7 +770,7 @@ def write_inspection_routines(self, ofile): oline = "character(len=*), allocatable, intent(out) :: variable_list(:)" ofile.write(oline, 2) self._errmsg_var.write_def(ofile, 2, self, extra_space=22) - self._errflg_var.write_def(ofile, 2, self, extra_space=22) + self._errcode_var.write_def(ofile, 2, self, extra_space=22) oline = "logical, optional, intent(in) :: input_vars" ofile.write(oline, 2) oline = "logical, optional, intent(in) :: output_vars" @@ -2602,7 +783,7 @@ def write_inspection_routines(self, ofile): ofile.write("logical {}:: struct_elements_use".format(' '*34), 2) ofile.write("integer {}:: num_vars".format(' '*34), 2) ofile.write("", 0) - ename = self._errflg_var.get_prop_value('local_name') + ename = self._errcode_var.get_prop_value('local_name') ofile.write("{} = 0".format(ename), 2) ename = self._errmsg_var.get_prop_value('local_name') ofile.write("{} = ''".format(ename), 2) @@ -2875,10 +1056,66 @@ def write_inspection_routines(self, ofile): emsg = "write({errmsg}, '(3a)')".format(errmsg=errmsg_name) emsg += "'No suite named ', trim(suite_name), ' found'" ofile.write(emsg, 3) - ofile.write("{errflg} = 1".format(errflg=errflg_name), 3) + ofile.write("{errcode} = 1".format(errcode=errcode_name), 3) ofile.write("end if", 2) ofile.write("end subroutine {}".format(API.__vars_fname), 1) + def write_suite_schemes_sub(self, ofile, errmsg_name, errcode_name): + """Write the suite schemes list subroutine""" + oline = "suite_name, scheme_list, {errmsg}, {errcode}" + inargs = oline.format(errmsg=errmsg_name, errcode=errcode_name) + ofile.write("\nsubroutine {}({})".format(API.__schemes_fname, + inargs), 1) + oline = "character(len=*), intent(in) :: suite_name" + ofile.write(oline, 2) + oline = "character(len=*), allocatable, intent(out) :: scheme_list(:)" + ofile.write(oline, 2) + self._errmsg_var.write_def(ofile, 2, self) + self._errcode_var.write_def(ofile, 2, self) 
+ else_str = '' + ename = self._errcode_var.get_prop_value('local_name') + ofile.write("{} = 0".format(ename), 2) + ename = self._errmsg_var.get_prop_value('local_name') + ofile.write("{} = ''".format(ename), 2) + for suite in self.suites: + oline = "{}if(trim(suite_name) == '{}') then" + ofile.write(oline.format(else_str, suite.name), 2) + # Collect the list of schemes in this suite + schemes = set() + for part in suite.groups: + schemes.update([x.name for x in part.schemes()]) + # end for + # Write out the list + API.write_var_set_loop(ofile, 'scheme_list', schemes, 3) + else_str = 'else ' + # end for + ofile.write("else", 2) + emsg = "write({errmsg}, '(3a)')".format(errmsg=errmsg_name) + emsg += "'No suite named ', trim(suite_name), ' found'" + ofile.write(emsg, 3) + ofile.write("{errcode} = 1".format(errcode=errcode_name), 3) + ofile.write("end if", 2) + ofile.write("end subroutine {}".format(API.__schemes_fname), 1) + + def write_inspection_routines(self, ofile): + """Write the list_suites and list_suite_parts subroutines""" + errmsg_name, errcode_name = self.get_errinfo_names() + ofile.write("subroutine {}(suites)".format(API.__suite_fname), 1) + nsuites = len(self.suites) + oline = "character(len=*), allocatable, intent(out) :: suites(:)" + ofile.write(oline, 2) + ofile.write("\nallocate(suites({}))".format(nsuites), 2) + for ind, suite in enumerate(self.suites): + ofile.write("suites({}) = '{}'".format(ind+1, suite.name), 2) + # end for + ofile.write("end subroutine {}".format(API.__suite_fname), 1) + # Write out the suite part list subroutine + self.write_suite_part_list_sub(ofile, errmsg_name, errcode_name) + # Write out the suite required variable subroutine + self.write_req_vars_sub(ofile, errmsg_name, errcode_name) + # Write out the suite scheme list subroutine + self.write_suite_schemes_sub(ofile, errmsg_name, errcode_name) + @property def module(self): """Return the module name of the API.""" @@ -2906,23 +1143,20 @@ def suites(self): 
############################################################################### if __name__ == "__main__": - # pylint: disable=ungrouped-imports - from parse_tools import init_log, set_log_to_null - LOGGING = init_log('ccpp_suite') - set_log_to_null(LOGGING) try: # First, run doctest import doctest doctest.testmod() # Goal: Replace this test with a suite from unit tests FRAME_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - CAM = os.path.dirname(FRAME_ROOT) - KESSLER = os.path.join(CAM, 'src', 'physics', 'ncar_ccpp', - 'suite_kessler.xml') - if os.path.exists(KESSLER): - _ = Suite(KESSLER, VarDictionary('Kessler'), LOGGING) + TEMP_SUITE = os.path.join(FRAME_ROOT, 'test', 'capgen_test', + 'temp_suite.xml') + if os.path.exists(TEMP_SUITE): + _ = Suite(TEMP_SUITE, VarDictionary('temp_suite', + _API_DUMMY_RUN_ENV), + _API_DUMMY_RUN_ENV) else: - print("Cannot find test file, '{}', skipping test".format(KESSLER)) + print("Cannot find test file, '{}', skipping test".format(TEMP_SUITE)) except CCPPError as suite_error: print("{}".format(suite_error)) # end if (no else) diff --git a/scripts/code_block.py b/scripts/code_block.py index eaf03e3d..391247e3 100644 --- a/scripts/code_block.py +++ b/scripts/code_block.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # """Class and methods to create a code block which can then be written diff --git a/scripts/common.py b/scripts/common.py index 166cb9be..0ac2c74d 100755 --- a/scripts/common.py +++ b/scripts/common.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 from collections import OrderedDict import keyword diff --git a/scripts/constituents.py b/scripts/constituents.py index 747b64a3..be8d4774 100644 --- a/scripts/constituents.py +++ b/scripts/constituents.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 """ Class and supporting code to hold all information on CCPP constituent @@ -37,7 +37,7 @@ class ConstituentVarDict(VarDictionary): __const_prop_type_name 
= "ccpp_constituent_properties_t" __constituent_type = "suite" - def __init__(self, name, parent_dict, variables=None, logger=None): + def __init__(self, name, parent_dict, run_env, variables=None): """Create a specialized VarDictionary for constituents. The main difference is functionality to allocate and support these variables with special functions for the host model. @@ -46,9 +46,10 @@ def __init__(self, name, parent_dict, variables=None, logger=None): The feature of the VarDictionary class is required because this dictionary must be connected to a host model. """ - super(ConstituentVarDict, self).__init__(name, variables=variables, - parent_dict=parent_dict, - logger=logger) + self.__run_env = run_env + super(ConstituentVarDict, self).__init__(name, run_env, + variables=variables, + parent_dict=parent_dict) def find_variable(self, standard_name=None, source_var=None, any_scope=True, clone=None, @@ -84,14 +85,13 @@ def find_variable(self, standard_name=None, source_var=None, # end if if standard_name in self: var = self[standard_name] - elif any_scope and (self._parent_dict is not None): + elif any_scope and (self.parent is not None): srch_clist = search_call_list - var = self._parent_dict.find_variable(standard_name=standard_name, - source_var=source_var, - any_scope=any_scope, - clone=None, - search_call_list=srch_clist, - loop_subst=loop_subst) + var = self.parent.find_variable(standard_name=standard_name, + source_var=source_var, + any_scope=any_scope, clone=None, + search_call_list=srch_clist, + loop_subst=loop_subst) else: var = None # end if @@ -119,9 +119,24 @@ def find_variable(self, standard_name=None, source_var=None, # end for var = source_var.clone({'dimensions' : newdims}, remove_intent=True, source_type=self.__constituent_type) - self.add_variable(var) + self.add_variable(var, self.__run_env) return var + @staticmethod + def __init_err_var(evar, outfile, indent): + """If is a known error variable, generate the code to + initialize it as an 
output variable. + If unknown, simply ignore. + """ + stdname = evar.get_prop_value('standard_name') + if stdname == 'ccpp_error_message': + lname = evar.get_prop_value('local_name') + outfile.write("{} = ''".format(lname), indent) + elif stdname == 'ccpp_error_code': + lname = evar.get_prop_value('local_name') + outfile.write("{} = 0".format(lname), indent) + # end if (no else, just ignore) + def declare_public_interfaces(self, outfile, indent): """Declare the public constituent interfaces. Declarations are written to at indent, .""" @@ -150,24 +165,61 @@ def declare_private_data(self, outfile, indent): stmt = "private :: {}" outfile.write(stmt.format(self.constituent_prop_init_consts()), indent) + @classmethod + def __errcode_names(cls, err_vars): + """Return the ( ) where is the local name + for ccpp_error_code in and is the local name for + ccpp_error_message in . + if either variable is not found in , return None.""" + errcode = None + errmsg = None + for evar in err_vars: + stdname = evar.get_prop_value('standard_name') + if stdname == 'ccpp_error_code': + errcode = evar.get_prop_value('local_name') + elif stdname == 'ccpp_error_message': + errmsg = evar.get_prop_value('local_name') + else: + emsg = "Bad errcode variable, '{}'" + raise ParseInternalError(emsg.format(stdname)) + # end if + # end for + if (not errcode) or (not errmsg): + raise ParseInternalError("Unsupported error scheme") + # end if + return errcode, errmsg + + @staticmethod + def __errcode_callstr(errcode_name, errmsg_name, suite): + """Create and return the error code calling string for . + is the calling routine's ccpp_error_code variable name. + is the calling routine's ccpp_error_message variable name. 
+ """ + err_vars = suite.find_error_variables(any_scope=True, clone_as_out=True) + errcode, errmsg = ConstituentVarDict.__errcode_names(err_vars) + errvar_str = "{}={}, {}={}".format(errcode, errcode_name, + errmsg, errmsg_name) + return errvar_str + def _write_init_check(self, outfile, indent, suite_name, - errvar_names, use_errflg): + err_vars, use_errcode): """Write a check to to make sure the constituent properties are initialized. Write code to initialize the error variables and/or set them to error values.""" outfile.write('', 0) - if use_errflg: - outfile.write("errflg = 0", indent+1) - outfile.write("errmsg = ''", indent+1) + if use_errcode: + errcode, errmsg = self.__errcode_names(err_vars) + outfile.write("{} = 0".format(errcode), indent+1) + outfile.write("{} = ''".format(errmsg), indent+1) else: - raise ParseInternalError("Alternative to errflg not implemented") + raise ParseInternalError("Alternative to errcode not implemented") # end if outfile.write("! Make sure that our constituent array is initialized", indent+1) stmt = "if (.not. {}) then" outfile.write(stmt.format(self.constituent_prop_init_name()), indent+1) - if use_errflg: - outfile.write("errflg = 1", indent+2) + if use_errcode: + outfile.write("{} = 1".format(errcode), indent+2) stmt = 'errmsg = "constituent properties not ' stmt += 'initialized for suite, {}"' outfile.write(stmt.format(suite_name), indent+2) @@ -175,34 +227,36 @@ def _write_init_check(self, outfile, indent, suite_name, # end if (no else until an alternative error mechanism supported) def _write_index_check(self, outfile, indent, suite_name, - errvar_names, use_errflg): + err_vars, use_errcode): """Write a check to to make sure the "index" input is in bounds. 
Write code to set error variables if index is out of bounds.""" - if use_errflg: + if use_errcode: + errcode, errmsg = self.__errcode_names(err_vars) if self: outfile.write("if (index < 1) then", indent+1) - outfile.write("errflg = 1", indent+2) - stmt = "write(errmsg, '(a,i0,a)') 'ERROR: index (',index,') " + outfile.write("{} = 1".format(errcode), indent+2) + stmt = "write({}, '(a,i0,a)') 'ERROR: index (',index,') " stmt += "too small, must be >= 1'" - outfile.write(stmt, indent+2) + outfile.write(stmt.format(errmsg), indent+2) stmt = "else if (index > SIZE({})) then" outfile.write(stmt.format(self.constituent_prop_array_name()), indent+1) - outfile.write("errflg = 1", indent+2) - stmt = "write(errmsg, '(2(a,i0))') 'ERROR: index (',index,') " + outfile.write("{} = 1".format(errcode), indent+2) + stmt = "write({}, '(2(a,i0))') 'ERROR: index (',index,') " stmt += "too large, must be <= ', SIZE({})" - outfile.write(stmt.format(self.constituent_prop_array_name()), + outfile.write(stmt.format(errmsg, + self.constituent_prop_array_name()), indent+2) outfile.write("end if", indent+1) else: - outfile.write("errflg = 1", indent+1) - stmt = "write(errmsg, '(a,i0,a)') 'ERROR: suite, {}, " + outfile.write("{} = 1".format(errcode), indent+1) + stmt = "write({}, '(a,i0,a)') 'ERROR: {}, " stmt += "has no constituents'" - outfile.write(stmt, indent+1) + outfile.write(stmt.format(errmsg, self.name), indent+1) # end if else: - raise ParseInternalError("Alternative to errflg not implemented") + raise ParseInternalError("Alternative to errcode not implemented") # end if def write_constituent_routines(self, outfile, indent, suite_name, err_vars): @@ -211,12 +265,20 @@ def write_constituent_routines(self, outfile, indent, suite_name, err_vars): this suite. 
Code is written to starting at indent, .""" # Format our error variables - errvar_names = [x.get_prop_value('local_name') for x in err_vars] - use_errflg = ('errflg' in errvar_names) and ('errmsg' in errvar_names) - errvar_alist = ", ".join([x for x in errvar_names]) + errvar_names = {x.get_prop_value('standard_name') : + x.get_prop_value('local_name') for x in err_vars} + errcode_snames = ('ccpp_error_code', 'ccpp_error_message') + use_errcode = all([x.get_prop_value('standard_name') in errcode_snames + for x in err_vars]) + errvar_alist = ", ".join([x for x in errvar_names.values()]) errvar_alist2 = ", {}".format(errvar_alist) if errvar_alist else "" - errvar_call = ", ".join(["{}={}".format(x,x) for x in errvar_names]) + call_vnames = {'ccpp_error_code' : 'errcode', + 'ccpp_error_message' : 'errmsg'} + errvar_call = ", ".join(["{}={}".format(call_vnames[x], errvar_names[x]) + for x in errcode_snames]) errvar_call2 = ", {}".format(errvar_call) if errvar_call else "" + local_call = ", ".join(["{}={}".format(errvar_names[x], errvar_names[x]) + for x in errcode_snames]) # Allocate and define constituents stmt = "subroutine {}({})".format(self.constituent_prop_init_consts(), errvar_alist) @@ -238,6 +300,7 @@ def write_constituent_routines(self, outfile, indent, suite_name, err_vars): # end if for std_name, var in self.items(): outfile.write("index = index + 1", indent+1) + long_name = var.get_prop_value('long_name') dims = var.get_dim_stdnames() if 'vertical_layer_dimension' in dims: vertical_dim = 'vertical_layer_dimension' @@ -247,10 +310,13 @@ def write_constituent_routines(self, outfile, indent, suite_name, err_vars): vertical_dim = '' # end if advect_str = self.TF_string(var.get_prop_value('advected')) - stmt = 'call {}(index)%initialize("{}", "{}", {}{})' + stmt = 'call {}(index)%initialize("{}", "{}", "{}", {}{})' outfile.write(stmt.format(self.constituent_prop_array_name(), - std_name, vertical_dim, advect_str, - errvar_call2), indent+1) + std_name, 
long_name, vertical_dim, + advect_str, errvar_call2), indent+1) + # end for + for evar in err_vars: + self.__init_err_var(evar, outfile, indent+1) # end for outfile.write("{} = .true.".format(self.constituent_prop_init_name()), indent+1) @@ -268,12 +334,15 @@ def write_constituent_routines(self, outfile, indent, suite_name, err_vars): for evar in err_vars: evar.write_def(outfile, indent+1, self, dummy=True) # end for + for evar in err_vars: + self.__init_err_var(evar, outfile, indent+1) + # end for outfile.write("! Make sure that our constituent array is initialized", indent+1) stmt = "if (.not. {}) then" outfile.write(stmt.format(self.constituent_prop_init_name()), indent+1) outfile.write("call {}({})".format(self.constituent_prop_init_consts(), - errvar_call), indent+2) + local_call), indent+2) outfile.write("end if", indent+1) outfile.write("{} = {}".format(fname, len(self)), indent+1) outfile.write("end function {}".format(fname), indent) @@ -290,9 +359,9 @@ def write_constituent_routines(self, outfile, indent, suite_name, err_vars): evar.write_def(outfile, indent+1, self, dummy=True) # end for self._write_init_check(outfile, indent, suite_name, - errvar_names, use_errflg) + err_vars, use_errcode) self._write_index_check(outfile, indent, suite_name, - errvar_names, use_errflg) + err_vars, use_errcode) if self: stmt = "call {}(index)%standard_name(name_out{})" outfile.write(stmt.format(self.constituent_prop_array_name(), @@ -314,9 +383,9 @@ def write_constituent_routines(self, outfile, indent, suite_name, err_vars): evar.write_def(outfile, indent+1, self, dummy=True) # end for self._write_init_check(outfile, indent, suite_name, - errvar_names, use_errflg) + err_vars, use_errcode) self._write_index_check(outfile, indent, suite_name, - errvar_names, use_errflg) + err_vars, use_errcode) if self: stmt = "cnst_out = {}(index)" outfile.write(stmt.format(self.constituent_prop_array_name()), @@ -392,15 +461,24 @@ def write_host_routines(cap, host, reg_funcname, 
num_const_funcname, Output is written to . """ # XXgoldyXX: v need to generalize host model error var type support - err_callstr = "errflg=errflg, errmsg=errmsg" + use_errcode = [x.get_prop_value('standard_name') in + ('ccpp_error_code' 'ccpp_error_message') + for x in err_vars] + if not use_errcode: + emsg = "Error object not supported for {}" + raise ParseInternalError(emsg(host.name)) + # end if + herrcode, herrmsg = ConstituentVarDict.__errcode_names(err_vars) + err_dummy_str = "{errcode}, {errmsg}".format(errcode=herrcode, + errmsg=herrmsg) + obj_err_callstr = "errcode={errcode}, errmsg={errmsg}" + obj_err_callstr = obj_err_callstr.format(errcode=herrcode, + errmsg=herrmsg) # XXgoldyXX: ^ need to generalize host model error var type support - err_names = [x.get_prop_value('local_name') for x in err_vars] - errvar_str = ', '.join(err_names) # First up, the registration routine substmt = "subroutine {}".format(reg_funcname) stmt = "{}(suite_list, ncols, num_layers, num_interfaces, {})" - stmt = stmt.format(substmt, errvar_str) - cap.write(stmt, 1) + cap.write(stmt.format(substmt, err_dummy_str), 1) cap.write("! Create constituent object for suites in ", 2) cap.write("", 0) ConstituentVarDict.write_constituent_use_statements(cap, suite_list, 2) @@ -421,23 +499,28 @@ def write_host_routines(cap, host, reg_funcname, num_const_funcname, cap.write("integer{} :: field_ind".format(spc), 2) cap.write("type({}), pointer :: const_prop".format(CONST_PROP_TYPE), 2) cap.write("", 0) + cap.write("{} = 0".format(herrcode), 2) cap.write("num_consts = 0", 2) for suite in suite_list: const_dict = suite.constituent_dictionary() funcname = const_dict.num_consts_funcname() cap.write("! 
Number of suite constants for {}".format(suite.name), 2) + errvar_str = ConstituentVarDict.__errcode_callstr(herrcode, + herrmsg, suite) cap.write("num_suite_consts = {}({})".format(funcname, errvar_str), 2) cap.write("num_consts = num_consts + num_suite_consts", 2) # end for - cap.write("if (errflg == 0) then", 2) + cap.write("if ({} == 0) then".format(herrcode), 2) cap.write("! Initialize constituent data and field object", 3) stmt = "call {}%initialize_table(num_consts)" cap.write(stmt.format(const_obj_name), 3) cap.write("end if", 2) for suite in suite_list: - cap.write("if (errflg == 0) then", 2) + errvar_str = ConstituentVarDict.__errcode_callstr(herrcode, + herrmsg, suite) + cap.write("if ({} == 0) then".format(herrcode), 2) cap.write("! Add {} constituent metadata".format(suite.name), 3) const_dict = suite.constituent_dictionary() funcname = const_dict.num_consts_funcname() @@ -446,41 +529,42 @@ def write_host_routines(cap, host, reg_funcname, num_const_funcname, cap.write("end if", 2) funcname = const_dict.copy_const_subname() cap.write("do index = 1, num_suite_consts", 2) - cap.write("allocate(const_prop, stat=errflg)", 3) - cap.write("if (errflg /= 0) then", 3) - cap.write('errmsg = "ERROR allocating const_prop"', 4) + cap.write("allocate(const_prop, stat={})".format(herrcode), 3) + cap.write("if ({} /= 0) then".format(herrcode), 3) + cap.write('{} = "ERROR allocating const_prop"'.format(herrmsg), 4) cap.write("end if", 3) - cap.write("if (errflg == 0) then", 3) + cap.write("if ({} == 0) then".format(herrcode), 3) stmt = "call {}(index, const_prop, {})" - cap.write(stmt.format(funcname, err_callstr), 4) + cap.write(stmt.format(funcname, errvar_str), 4) cap.write("end if", 3) - cap.write("if (errflg == 0) then", 3) + cap.write("if ({} == 0) then".format(herrcode), 3) stmt = "call {}%new_field(const_prop, {})" - cap.write(stmt.format(const_obj_name, err_callstr), 4) + cap.write(stmt.format(const_obj_name, obj_err_callstr), 4) cap.write("end if", 3) 
cap.write("nullify(const_prop)", 3) - cap.write("if (errflg /= 0) then", 3) + cap.write("if ({} /= 0) then".format(herrcode), 3) cap.write("exit", 4) cap.write("end if", 3) cap.write("end do", 2) cap.write("", 0) # end for - cap.write("if (errflg == 0) then", 2) + cap.write("if ({} == 0) then".format(herrcode), 2) stmt = "call {}%lock_table(ncols, num_layers, num_interfaces, {})" - cap.write(stmt.format(const_obj_name, err_callstr), 3) + cap.write(stmt.format(const_obj_name, obj_err_callstr), 3) cap.write("end if", 2) cap.write("! Set the index for each active constituent", 2) cap.write("do index = 1, SIZE({})".format(const_indices_name), 2) stmt = "field_ind = {}%field_index({}(index), {})" - cap.write(stmt.format(const_obj_name, const_names_name, err_callstr), 3) + cap.write(stmt.format(const_obj_name, const_names_name, + obj_err_callstr), 3) cap.write("if (field_ind > 0) then", 3) cap.write("{}(index) = field_ind".format(const_indices_name), 4) cap.write("else", 3) - cap.write("errflg = 1", 4) - stmt = "errmsg = 'No field index for '//trim({}(index))" - cap.write(stmt.format(const_names_name), 4) + cap.write("{} = 1".format(herrcode), 4) + stmt = "{} = 'No field index for '//trim({}(index))" + cap.write(stmt.format(herrmsg, const_names_name), 4) cap.write("end if", 3) - cap.write("if (errflg /= 0) then", 3) + cap.write("if ({} /= 0) then".format(herrcode), 3) cap.write("exit", 4) cap.write("end if", 3) cap.write("end do", 2) @@ -488,7 +572,7 @@ def write_host_routines(cap, host, reg_funcname, num_const_funcname, # Next, write num_consts routine substmt = "function {}".format(num_const_funcname) cap.write("", 0) - cap.write("integer {}({})".format(substmt, errvar_str), 1) + cap.write("integer {}({})".format(substmt, err_dummy_str), 1) cap.write("! Return the number of constituent fields for this run", 2) cap.write("", 0) cap.write("! 
Dummy arguments", 2) @@ -498,12 +582,12 @@ def write_host_routines(cap, host, reg_funcname, num_const_funcname, cap.write("", 0) cap.write("{} = {}%num_constituents({})".format(num_const_funcname, const_obj_name, - err_callstr), 2) + obj_err_callstr), 2) cap.write("end {}".format(substmt), 1) # Next, write copy_in routine substmt = "subroutine {}".format(copy_in_funcname) cap.write("", 0) - cap.write("{}(const_array, {})".format(substmt, errvar_str), 1) + cap.write("{}(const_array, {})".format(substmt, err_dummy_str), 1) cap.write("! Copy constituent field info into ", 2) cap.write("", 0) cap.write("! Dummy arguments", 2) @@ -513,12 +597,12 @@ def write_host_routines(cap, host, reg_funcname, num_const_funcname, # end for cap.write("", 0) cap.write("call {}%copy_in(const_array, {})".format(const_obj_name, - err_callstr), 2) + obj_err_callstr), 2) cap.write("end {}".format(substmt), 1) # Next, write copy_out routine substmt = "subroutine {}".format(copy_out_funcname) cap.write("", 0) - cap.write("{}(const_array, {})".format(substmt, errvar_str), 1) + cap.write("{}(const_array, {})".format(substmt, err_dummy_str), 1) cap.write("! Update constituent field info from ", 2) cap.write("", 0) cap.write("! Dummy arguments", 2) @@ -528,7 +612,7 @@ def write_host_routines(cap, host, reg_funcname, num_const_funcname, # end for cap.write("", 0) cap.write("call {}%copy_out(const_array, {})".format(const_obj_name, - err_callstr), 2) + obj_err_callstr), 2) cap.write("end {}".format(substmt), 1) @staticmethod diff --git a/scripts/conversion_tools/unit_conversion.py b/scripts/conversion_tools/unit_conversion.py index b8d50707..b9afa144 100755 --- a/scripts/conversion_tools/unit_conversion.py +++ b/scripts/conversion_tools/unit_conversion.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 """A pilot version to perform unit conversions. 
Each conversion must be representable as a formula where {var} is substituted by the actual variable (scalar, array) to convert, diff --git a/scripts/ddt_library.py b/scripts/ddt_library.py index a569e1f8..876d2294 100644 --- a/scripts/ddt_library.py +++ b/scripts/ddt_library.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # # Class # @@ -9,6 +9,7 @@ # Python library imports from __future__ import print_function +import logging # CCPP framework imports from parse_tools import ParseInternalError, CCPPError, context_string from metavar import Var @@ -21,11 +22,12 @@ class VarDDT(Var): DDT nesting level). """ - def __init__(self, new_field, var_ref, logger=None, recur=False): + def __init__(self, new_field, var_ref, run_env, recur=False): """Initialize a new VarDDT object. is the DDT component. is a Var or VarDDT whose root originates in a model dictionary. + is the CCPPFrameworkEnv object for this framework run. The structure of the VarDDT object is: The super class Var object is a copy of the model root Var. The source = var_ref.source - super(VarDDT, self).__init__(var_ref, source, context=source.context, - logger=logger) + super().__init__(var_ref, source, run_env, context=source.context) # Find the correct place for if isinstance(var_ref, Var): # We are at a top level DDT var, set our field self.__field = new_field else: # Recurse to find correct (tail) location for - self.__field = VarDDT(new_field, var_ref.field, - logger=logger, recur=True) + self.__field = VarDDT(new_field, var_ref.field, run_env, recur=True) # End if - if (not recur) and (logger is not None): - logger.debug('Adding DDT field, {}'.format(self)) + if ((not recur) and + run_env.logger and run_env.logger.isEnabledFor(logging.DEBUG)): + run_env.logger.debug('Adding DDT field, {}'.format(self)) # End if def is_ddt(self): @@ -57,13 +58,13 @@ def is_ddt(self): def get_parent_prop(self, name): """Return the Var property value for the parent Var object. 
""" - return super(VarDDT, self).get_prop_value(name) + return super().get_prop_value(name) def get_prop_value(self, name): """Return the Var property value for the leaf Var object. """ if self.field is None: - pvalue = super(VarDDT, self).get_prop_value(name) + pvalue = super().get_prop_value(name) else: pvalue = self.field.get_prop_value(name) # End if @@ -74,7 +75,7 @@ def intrinsic_elements(self, check_dict=None): See Var.intrinsic_elem for details """ if self.field is None: - pvalue = super(VarDDT, self).intrinsic_elements(check_dict=check_dict) + pvalue = super().intrinsic_elements(check_dict=check_dict) else: pvalue = self.field.intrinsic_elements(check_dict=check_dict) # End if @@ -91,10 +92,8 @@ def clone(self, subst_dict, source_name=None, source_type=None, by default, the source and type are the same as this Var (self). """ if self.field is None: - clone_var = super(VarDDT, self).clone(subst_dict, - source_name=source_name, - source_type=source_type, - context=context) + clone_var = super().clone(subst_dict, source_name=source_name, + source_type=source_type, context=context) else: clone_var = self.field.clone(subst_dict, source_name=source_name, @@ -107,7 +106,7 @@ def call_string(self, var_dict, loop_vars=None): """Return a legal call string of this VarDDT's local name sequence. 
""" # XXgoldyXX: Need to add dimensions to this - call_str = super(VarDDT, self).get_prop_value('local_name') + call_str = super().get_prop_value('local_name') if self.field is not None: call_str += '%' + self.field.call_string(var_dict, loop_vars=loop_vars) @@ -119,8 +118,8 @@ def write_def(self, outfile, indent, ddict, allocatable=False, dummy=False): The type of this declaration is the type of the Var at the end of the chain of references.""" if self.field is None: - super(VarDDT, self).write_def(outfile, indent, ddict, - allocatable=allocatable, dummy=dummy) + super().write_def(outfile, indent, ddict, + allocatable=allocatable, dummy=dummy) else: self.field.write_def(outfile, indent, ddict, allocatable=allocatable, dummy=dummy) @@ -174,7 +173,7 @@ def __str__(self): @property def var(self): "Return this VarDDT's Var object" - return super(VarDDT, self) + return super() @property def field(self): @@ -190,12 +189,15 @@ class DDTLibrary(dict): The dictionary holds known standard names. """ - def __init__(self, name, ddts=None, logger=None): + def __init__(self, name, run_env, ddts=None, logger=None): "Our dict is DDT definition headers, key is type" - self._name = '{}_ddt_lib'.format(name) - self._ddt_fields = {} # DDT field to DDT access map - self._max_mod_name_len = 0 - super(DDTLibrary, self).__init__() + self.__name = '{}_ddt_lib'.format(name) +# XXgoldyXX: v remove? +# self.__ddt_fields = {} # DDT field to DDT access map +# XXgoldyXX: ^ remove? 
+ self.__max_mod_name_len = 0 + self.__run_env = run_env + super().__init__() if ddts is None: ddts = list() elif not isinstance(ddts, list): @@ -223,8 +225,8 @@ def __init__(self, name, ddts=None, logger=None): # End if self[ddt.title] = ddt dlen = len(ddt.module) - if dlen > self._max_mod_name_len: - self._max_mod_name_len = dlen + if dlen > self.__max_mod_name_len: + self.__max_mod_name_len = dlen # End if # End for @@ -245,7 +247,7 @@ def check_ddt_type(self, var, header, lname=None): # End if # End if (no else needed) - def collect_ddt_fields(self, var_dict, var, ddt=None): + def collect_ddt_fields(self, var_dict, var, run_env, ddt=None): """Add all the reachable fields from DDT variable of type, to . Each field is added as a VarDDT. """ @@ -261,12 +263,12 @@ def collect_ddt_fields(self, var_dict, var, ddt=None): # End if # End if for dvar in ddt.variable_list(): - subvar = VarDDT(dvar, var) + subvar = VarDDT(dvar, var, self.run_env) dvtype = dvar.get_prop_value('type') if (dvar.is_ddt()) and (dvtype in self): # If DDT in our library, we need to add sub-fields recursively. subddt = self[dvtype] - self.collect_ddt_fields(var_dict, subvar, subddt) + self.collect_ddt_fields(var_dict, subvar, run_env, ddt=subddt) else: # add_variable only checks the current dictionary. For a # DDT, the variable also cannot be in our parent dictionaries. 
@@ -281,7 +283,7 @@ def collect_ddt_fields(self, var_dict, var, ddt=None): raise CCPPError(emsg.format(stdname, ntx, ctx)) # end if # Add this intrinsic to - var_dict.add_variable(subvar) + var_dict.add_variable(subvar, run_env) # End for def ddt_modules(self, variable_list, ddt_mods=None): @@ -304,7 +306,7 @@ def ddt_modules(self, variable_list, ddt_mods=None): def write_ddt_use_statements(self, variable_list, outfile, indent, pad=0): """Write the use statements for all ddt modules needed by """ - pad = max(pad, self._max_mod_name_len) + pad = max(pad, self.__max_mod_name_len) ddt_mods = self.ddt_modules(variable_list) for ddt_mod in ddt_mods: dmod = ddt_mod[0] @@ -317,7 +319,12 @@ def write_ddt_use_statements(self, variable_list, outfile, indent, pad=0): @property def name(self): "Return the name of this DDT library" - return self._name + return self.__name + + @property + def run_env(self): + """Return the CCPPFrameworkEnv object for this DDT library""" + return self.__run_env ############################################################################### if __name__ == "__main__": diff --git a/scripts/file_utils.py b/scripts/file_utils.py index 8742ef6a..8cdfd023 100644 --- a/scripts/file_utils.py +++ b/scripts/file_utils.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 """ Utilities for checking and manipulating file status @@ -209,7 +209,7 @@ def create_file_list(files, suffices, file_type, logger, root_path=None): txt_files = list() # Already processed txt files pathname = None if isinstance(files, str): - file_list = [x.strip() for x in files.split(',')] + file_list = [x.strip() for x in files.split(',') if x.strip()] elif isinstance(files, (list, tuple)): file_list = files else: diff --git a/scripts/fortran_tools/fortran_write.py b/scripts/fortran_tools/fortran_write.py index ac9a483e..ab8c9d30 100644 --- a/scripts/fortran_tools/fortran_write.py +++ b/scripts/fortran_tools/fortran_write.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python 
+#!/usr/bin/env python3 # """Code to write Fortran code diff --git a/scripts/fortran_tools/parse_fortran.py b/scripts/fortran_tools/parse_fortran.py index 1bcd9dd3..2556e60f 100644 --- a/scripts/fortran_tools/parse_fortran.py +++ b/scripts/fortran_tools/parse_fortran.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 """Types and code for parsing Fortran source code. """ @@ -16,7 +16,7 @@ #pylint: disable=unused-import from parse_tools import ParseSource # Used in doctest #pylint: enable=unused-import -from metavar import Var +from metavar import FortranVar # pylint: enable=wrong-import-position # A collection of types and tools for parsing Fortran code to support @@ -474,6 +474,10 @@ class FtypeTypeDecl(Ftype): >>> FtypeTypeDecl.type_match('type(foo)') #doctest: +ELLIPSIS + >>> FtypeTypeDecl.type_match('class(foo)') #doctest: +ELLIPSIS + + >>> FtypeTypeDecl.class_match('class(foo)') #doctest: +ELLIPSIS + >>> FtypeTypeDecl.type_def_line('type GFS_statein_type') ['GFS_statein_type', None, None] >>> FtypeTypeDecl.type_def_line('type GFS_statein_type (n, m) ') @@ -490,9 +494,14 @@ class FtypeTypeDecl(Ftype): __type_attr_spec__ = ['abstract', 'bind', 'extends', 'private', 'public'] + __class_decl_re__ = re.compile(r"(?i)(class)\s*\(\s*([A-Z][A-Z0-9_]*)\s*\)") + def __init__(self, line, context): """Initialize an extended type from a declaration line""" match = FtypeTypeDecl.type_match(line) + if match is None: + match = FtypeTypeDecl.class_match(line) + # end if if match is None: raise ParseSyntaxError("type declaration", token=line, context=context) @@ -505,11 +514,21 @@ def __init__(self, line, context): @classmethod def type_match(cls, line): - """Return an RE match if represents an FtypeTypeDecl declaration""" + """Return an RE match if represents an FtypeTypeDecl declaration + """ match = FtypeTypeDecl.__type_decl_re__.match(line.strip()) # end if return match + @classmethod + def class_match(cls, line): + """Return an RE match if represents an 
FtypeTypeDecl declaration + representing the declaration of a polymorphic variable + """ + match = FtypeTypeDecl.__class_decl_re__.match(line.strip()) + # end if + return match + @classmethod def type_def_line(cls, line): """Return a type information if represents the start @@ -598,6 +617,12 @@ def ftype_factory(line, context): tobj = FtypeTypeDecl(line, context) # end if # end if + if tmatch is None: + tmatch = FtypeTypeDecl.class_match(line) + if tmatch is not None: + tobj = FtypeTypeDecl(line, context) + # end if + # end if return tobj ######################################################################## @@ -608,7 +633,7 @@ def fortran_type_definition(line): return FtypeTypeDecl.type_def_line(line) ######################################################################## -def parse_fortran_var_decl(line, source, logger=None): +def parse_fortran_var_decl(line, source, run_env): ######################################################################## """Parse a Fortran variable declaration line and return a list of Var objects representing the variables declared on . 
@@ -640,39 +665,41 @@ def parse_fortran_var_decl(line, source, logger=None): '(8)' >>> _VAR_ID_RE.match("foo(::,a:b,a:,:b)").group(2) '(::,a:b,a:,:b)' - >>> parse_fortran_var_decl("integer :: foo", ParseSource('foo.F90', 'module', ParseContext()))[0].get_prop_value('local_name') + >>> parse_fortran_var_decl("integer :: foo", ParseSource('foo.F90', 'module', ParseContext()), _DUMMY_RUN_ENV)[0].get_prop_value('local_name') 'foo' - >>> parse_fortran_var_decl("integer :: foo = 0", ParseSource('foo.F90', 'module', ParseContext()))[0].get_prop_value('local_name') + >>> parse_fortran_var_decl("integer :: foo = 0", ParseSource('foo.F90', 'module', ParseContext()), _DUMMY_RUN_ENV)[0].get_prop_value('local_name') 'foo' - >>> parse_fortran_var_decl("integer :: foo", ParseSource('foo.F90', 'module', ParseContext()))[0].get_prop_value('optional') + >>> parse_fortran_var_decl("integer :: foo", ParseSource('foo.F90', 'module', ParseContext()), _DUMMY_RUN_ENV)[0].get_prop_value('optional') - >>> parse_fortran_var_decl("integer, optional :: foo", ParseSource('foo.F90', 'module', ParseContext()))[0].get_prop_value('optional') + >>> parse_fortran_var_decl("integer, optional :: foo", ParseSource('foo.F90', 'module', ParseContext()), _DUMMY_RUN_ENV)[0].get_prop_value('optional') 'True' - >>> parse_fortran_var_decl("integer, dimension(:) :: foo", ParseSource('foo.F90', 'module', ParseContext()))[0].get_prop_value('dimensions') + >>> parse_fortran_var_decl("integer, dimension(:) :: foo", ParseSource('foo.F90', 'module', ParseContext()), _DUMMY_RUN_ENV)[0].get_prop_value('dimensions') '(:)' - >>> parse_fortran_var_decl("integer, dimension(:) :: foo(bar)", ParseSource('foo.F90', 'module', ParseContext()))[0].get_prop_value('dimensions') + >>> parse_fortran_var_decl("integer, dimension(:) :: foo(bar)", ParseSource('foo.F90', 'module', ParseContext()), _DUMMY_RUN_ENV)[0].get_prop_value('dimensions') '(bar)' - >>> parse_fortran_var_decl("integer, dimension(:) :: foo(:,:), baz", 
ParseSource('foo.F90', 'module', ParseContext()))[0].get_prop_value('dimensions') + >>> parse_fortran_var_decl("integer, dimension(:) :: foo(:,:), baz", ParseSource('foo.F90', 'module', ParseContext()), _DUMMY_RUN_ENV)[0].get_prop_value('dimensions') '(:,:)' - >>> parse_fortran_var_decl("integer, dimension(:) :: foo(:,:), baz", ParseSource('foo.F90', 'module', ParseContext()))[1].get_prop_value('dimensions') + >>> parse_fortran_var_decl("integer, dimension(:) :: foo(:,:), baz", ParseSource('foo.F90', 'module', ParseContext()), _DUMMY_RUN_ENV)[1].get_prop_value('dimensions') '(:)' - >>> parse_fortran_var_decl("real (kind=kind_phys), pointer :: phii (:,:) => null() !< interface geopotential height", ParseSource('foo.F90', 'module', ParseContext()))[0].get_prop_value('dimensions') + >>> parse_fortran_var_decl("real (kind=kind_phys), pointer :: phii (:,:) => null() !< interface geopotential height", ParseSource('foo.F90', 'module', ParseContext()), _DUMMY_RUN_ENV)[0].get_prop_value('dimensions') '(:,:)' - >>> parse_fortran_var_decl("real(kind=kind_phys), dimension(im, levs, ntrac), intent(in) :: qgrs", ParseSource('foo.F90', 'scheme', ParseContext()))[0].get_prop_value('dimensions') + >>> parse_fortran_var_decl("real(kind=kind_phys), dimension(im, levs, ntrac), intent(in) :: qgrs", ParseSource('foo.F90', 'scheme', ParseContext()), _DUMMY_RUN_ENV)[0].get_prop_value('dimensions') '(im, levs, ntrac)' - >>> parse_fortran_var_decl("character(len=*), intent(out) :: errmsg", ParseSource('foo.F90', 'scheme', ParseContext()))[0].get_prop_value('local_name') + >>> parse_fortran_var_decl("character(len=*), intent(out) :: errmsg", ParseSource('foo.F90', 'scheme', ParseContext()), _DUMMY_RUN_ENV)[0].get_prop_value('local_name') 'errmsg' - >>> parse_fortran_var_decl("character(len=512), intent(out) :: errmsg", ParseSource('foo.F90', 'scheme', ParseContext()))[0].get_prop_value('kind') + >>> parse_fortran_var_decl("character(len=512), intent(out) :: errmsg", ParseSource('foo.F90', 
'scheme', ParseContext()), _DUMMY_RUN_ENV)[0].get_prop_value('kind') 'len=512' - >>> parse_fortran_var_decl("real(kind_phys), intent(out) :: foo(8)", ParseSource('foo.F90', 'scheme', ParseContext()))[0].get_prop_value('dimensions') + >>> parse_fortran_var_decl("real(kind_phys), intent(out) :: foo(8)", ParseSource('foo.F90', 'scheme', ParseContext()), _DUMMY_RUN_ENV)[0].get_prop_value('dimensions') '(8)' - >>> parse_fortran_var_decl("real(kind_phys), intent(out) :: foo(size(bar))", ParseSource('foo.F90', 'scheme', ParseContext()))[0].get_prop_value('dimensions') - '(size(bar))' - >>> parse_fortran_var_decl("real(kind_phys), intent(out) :: foo(8)", ParseSource('foo.F90', 'scheme', ParseContext()))[0].get_dimensions() + >>> parse_fortran_var_decl("real(kind_phys), intent(out) :: foo(8)", ParseSource('foo.F90', 'scheme', ParseContext()), _DUMMY_RUN_ENV)[0].get_dimensions() ['8'] - >>> parse_fortran_var_decl("character(len=*), intent(out) :: errmsg", ParseSource('foo.F90', 'module', ParseContext()))[0].get_prop_value('local_name') #doctest: +IGNORE_EXCEPTION_DETAIL + >>> parse_fortran_var_decl("character(len=*), intent(out) :: errmsg", ParseSource('foo.F90', 'module', ParseContext()), _DUMMY_RUN_ENV)[0].get_prop_value('local_name') #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ParseSyntaxError: Invalid variable declaration, character(len=*), intent(out) :: errmsg, intent not allowed in module variable, in + + ## NB: Expressions (including function calls) not currently supported here + #>>> parse_fortran_var_decl("real(kind_phys), intent(out) :: foo(size(bar))", ParseSource('foo.F90', 'scheme', ParseContext()), _DUMMY_RUN_ENV)[0].get_prop_value('dimensions') + #'(size(bar))' """ context = source.context sline = line.strip() @@ -697,10 +724,11 @@ def parse_fortran_var_decl(line, source, logger=None): typ = source.type errmsg = 'Invalid variable declaration, {}, intent' errmsg = errmsg + ' not allowed in {} variable' - if logger is not None: + if 
run_env.logger is not None: ctx = context_string(context) errmsg = "WARNING: " + errmsg + "{}" - logger.warning(errmsg.format(sline, typ, ctx)) + run_env.logger.warning(errmsg.format(sline, + typ, ctx)) else: raise ParseSyntaxError(errmsg.format(sline, typ), context=context) @@ -737,10 +765,10 @@ def parse_fortran_var_decl(line, source, logger=None): varname = var[0:ploc].strip() begin, end = check_balanced_paren(var) if (begin < 0) or (end < 0): - if logger is not None: + if run_env.logger is not None: ctx = context_string(context) errmsg = "WARNING: Invalid variable declaration, {}{}" - logger.warning(errmsg.format(var, ctx)) + run_env.logger.warning(errmsg.format(var, ctx)) else: raise ParseSyntaxError('variable declaration', token=var, context=context) @@ -779,8 +807,7 @@ def parse_fortran_var_decl(line, source, logger=None): # XXgoldyXX: I am nervous about allowing invalid Var objects here # Also, this tends to cause an exception that ends up back here # which is not a good idea. - var = Var(prop_dict, source, - invalid_ok=(logger is not None), logger=logger) + var = FortranVar(prop_dict, source, run_env) newvars.append(var) # end for # No else (not a variable declaration) @@ -802,4 +829,8 @@ def parse_fortran_var_decl(line, source, logger=None): if __name__ == "__main__": import doctest + from framework_env import CCPPFrameworkEnv + _DUMMY_RUN_ENV = CCPPFrameworkEnv(None, ndict={'host_files':'', + 'scheme_files':'', + 'suites':''}) doctest.testmod() diff --git a/scripts/fortran_tools/parse_fortran_file.py b/scripts/fortran_tools/parse_fortran_file.py index b37bd8db..4054a85e 100644 --- a/scripts/fortran_tools/parse_fortran_file.py +++ b/scripts/fortran_tools/parse_fortran_file.py @@ -1,4 +1,4 @@ -#! /usr/bin/env python +#! /usr/bin/env python3 """ Tool to parse a Fortran file and return signature information from metadata tables. 
@@ -15,6 +15,7 @@ # pylint: disable=wrong-import-position import re from collections import OrderedDict +import logging # CCPP framework imports from parse_tools import CCPPError, ParseInternalError, ParseSyntaxError from parse_tools import ParseContext, ParseObject, ParseSource, PreprocStack @@ -483,14 +484,14 @@ def is_comment_statement(statement): ######################################################################## -def parse_type_def(statements, type_def, mod_name, pobj, logger): +def parse_type_def(statements, type_def, mod_name, pobj, run_env): """Parse a type definition from and return the remaining statements along with a MetadataTable object representing the type's variables.""" psrc = ParseSource(mod_name, 'ddt', pobj) seen_contains = False mheader = None - var_dict = VarDictionary(type_def[0]) + var_dict = VarDictionary(type_def[0], run_env) inspec = True while inspec and (statements is not None): while len(statements) > 0: @@ -499,21 +500,19 @@ def parse_type_def(statements, type_def, mod_name, pobj, logger): pmatch = _END_TYPE_RE.match(statement) if pmatch is not None: # We hit the end of the type, make a header - mheader = MetadataTable(table_name_in=type_def[0], + mheader = MetadataTable(run_env, table_name_in=type_def[0], table_type_in='ddt', - module=mod_name, var_dict=var_dict, - logger=logger) + module=mod_name, var_dict=var_dict) inspec = False elif is_contains_statement(statement, inspec): seen_contains = True elif not seen_contains: # Comment of variable if ((not is_comment_statement(statement)) and - (not parse_use_statement(statement, logger))): - dvars = parse_fortran_var_decl(statement, psrc, - logger=logger) + (not parse_use_statement(statement, run_env.logger))): + dvars = parse_fortran_var_decl(statement, psrc, run_env) for var in dvars: - var_dict.add_variable(var) + var_dict.add_variable(var, run_env) # End for # End if else: @@ -529,19 +528,19 @@ def parse_type_def(statements, type_def, mod_name, pobj, logger): 
######################################################################## -def parse_preamble_data(statements, pobj, spec_name, endmatch, logger): +def parse_preamble_data(statements, pobj, spec_name, endmatch, run_env): """Parse module variables or DDT definitions from a module preamble or parse program variables from the beginning of a program. """ inspec = True mheaders = list() - var_dict = VarDictionary(spec_name) + var_dict = VarDictionary(spec_name, run_env) psrc = ParseSource(spec_name, 'MODULE', pobj) active_table = None - if logger is not None: + if run_env.logger is not None: ctx = context_string(pobj, nodir=True) msg = "Parsing preamble variables of {}{}" - logger.debug(msg.format(spec_name, ctx)) + run_env.logger.debug(msg.format(spec_name, ctx)) # End if while inspec and (statements is not None): while len(statements) > 0: @@ -559,43 +558,42 @@ def parse_preamble_data(statements, pobj, spec_name, endmatch, logger): # Put statement back so caller knows where we are statements.insert(0, statement) # Add the header (even if we found no variables) - mheader = MetadataTable(table_name_in=spec_name, + mheader = MetadataTable(run_env, table_name_in=spec_name, table_type_in='module', module=spec_name, - var_dict=var_dict, logger=logger) + var_dict=var_dict) mheaders.append(mheader) - if logger is not None: + if run_env.logger and run_env.logger.isEnabledFor(logging.DEBUG): ctx = context_string(pobj, nodir=True) msg = 'Adding header {}{}' - logger.debug(msg.format(mheader.table_name, ctx)) + run_env.logger.debug(msg.format(mheader.table_name, ctx)) break - elif ((type_def is not None) and + elif ((type_def is not None) and (active_table is not None) and (type_def[0].lower() == active_table.lower())): # Put statement back so caller knows where we are statements.insert(0, statement) statements, ddt = parse_type_def(statements, type_def, - spec_name, pobj, logger) + spec_name, pobj, run_env) if ddt is None: ctx = context_string(pobj, nodir=True) msg = "No DDT 
found at '{}'{}" raise CCPPError(msg.format(statement, ctx)) # End if mheaders.append(ddt) - if logger is not None: + if run_env.logger and run_env.logger.isEnabledFor(logging.DEBUG): ctx = context_string(pobj, nodir=True) msg = 'Adding DDT {}{}' - logger.debug(msg.format(ddt.table_name, ctx)) + run_env.logger.debug(msg.format(ddt.table_name, ctx)) # End if active_table = None elif active_table is not None: # We should have a variable definition to add if ((not is_comment_statement(statement)) and - (not parse_use_statement(statement, logger)) and + (not parse_use_statement(statement, run_env.logger)) and (active_table.lower() == spec_name.lower())): - dvars = parse_fortran_var_decl(statement, psrc, - logger=logger) + dvars = parse_fortran_var_decl(statement, psrc, run_env) for var in dvars: - var_dict.add_variable(var) + var_dict.add_variable(var, run_env) # End for # End if # End if (else we are not in an active table so just skip) @@ -608,7 +606,7 @@ def parse_preamble_data(statements, pobj, spec_name, endmatch, logger): ######################################################################## -def parse_scheme_metadata(statements, pobj, spec_name, table_name, logger): +def parse_scheme_metadata(statements, pobj, spec_name, table_name, run_env): "Parse dummy argument information from a subroutine" psrc = None mheader = None @@ -617,10 +615,10 @@ def parse_scheme_metadata(statements, pobj, spec_name, table_name, logger): # Find the subroutine line, should be first executable statement inpreamble = False insub = True - if logger is not None: + if run_env.logger and run_env.logger.isEnabledFor(logging.DEBUG): ctx = context_string(pobj, nodir=True) msg = "Parsing specification of {}{}" - logger.debug(msg.format(table_name, ctx)) + run_env.logger.debug(msg.format(table_name, ctx)) # End if ctx = context_string(pobj) # Save initial context with directory vdict = None # Initialized when we parse the subroutine arguments @@ -682,10 +680,9 @@ def 
parse_scheme_metadata(statements, pobj, spec_name, table_name, logger): inpreamble = False insub = False elif ((not is_comment_statement(statement)) and - (not parse_use_statement(statement, logger)) and + (not parse_use_statement(statement, run_env)) and is_dummy_argument_statement(statement)): - dvars = parse_fortran_var_decl(statement, psrc, - logger=logger) + dvars = parse_fortran_var_decl(statement, psrc, run_env) for var in dvars: lname = var.get_prop_value('local_name').lower() if lname in vdict: @@ -722,11 +719,11 @@ def parse_scheme_metadata(statements, pobj, spec_name, table_name, logger): errmsg = 'Missing local_variables, {} in {}' raise CCPPError(errmsg.format(missing, scheme_name)) # End if - var_dict = VarDictionary(scheme_name, variables=vdict) + var_dict = VarDictionary(scheme_name, run_env, variables=vdict) if (scheme_name is not None) and (var_dict is not None): - mheader = MetadataTable(table_name_in=scheme_name, + mheader = MetadataTable(run_env, table_name_in=scheme_name, table_type_in='scheme', module=spec_name, - var_dict=var_dict, logger=logger) + var_dict=var_dict) # End if return statements, mheader @@ -751,8 +748,8 @@ def duplicate_header(header, duplicate): ######################################################################## -def parse_specification(pobj, statements, mod_name=None, - prog_name=None, logger=None): +def parse_specification(pobj, statements, run_env, mod_name=None, + prog_name=None): """Parse specification part of a module or (sub)program""" if (mod_name is not None) and (prog_name is not None): raise ParseInternalError(" and cannot both be used") @@ -768,10 +765,10 @@ def parse_specification(pobj, statements, mod_name=None, else: raise ParseInternalError("One of or must be used") # End if - if logger is not None: + if run_env.logger is not None: ctx = context_string(pobj, nodir=True) msg = "Parsing specification of {}{}" - logger.debug(msg.format(spec_name, ctx)) + run_env.logger.debug(msg.format(spec_name, ctx)) # 
End if inspec = True @@ -791,18 +788,18 @@ def parse_specification(pobj, statements, mod_name=None, statements.insert(0, statement) statements, new_tbls = parse_preamble_data(statements, pobj, spec_name, - endmatch, logger) + endmatch, run_env) for tbl in new_tbls: title = tbl.table_name if title in mtables: errmsg = duplicate_header(mtables[title], tbl) raise CCPPError(errmsg) # end if - if logger is not None: + if run_env.logger and run_env.logger.isEnabledFor(logging.DEBUG): ctx = tbl.start_context() mtype = tbl.table_type msg = "Adding metadata from {}, {}{}" - logger.debug(msg.format(mtype, title, ctx)) + run_env.logger.debug(msg.format(mtype, title, ctx)) # End if mtables.append(tbl) # End if @@ -821,7 +818,7 @@ def parse_specification(pobj, statements, mod_name=None, ######################################################################## -def parse_program(pobj, statements, logger=None): +def parse_program(pobj, statements, run_env): """Parse a Fortran PROGRAM and return any leftover statements and metadata tables encountered in the PROGRAM.""" # The first statement should be a program statement, grab the name @@ -831,15 +828,14 @@ def parse_program(pobj, statements, logger=None): # End if prog_name = pmatch.group(1) pobj.enter_region('PROGRAM', region_name=prog_name, nested_ok=False) - if logger is not None: + if run_env.logger is not None: ctx = context_string(pobj, nodir=True) msg = "Parsing Fortran program, {}{}" - logger.debug(msg.format(prog_name, ctx)) + run_env.logger.debug(msg.format(prog_name, ctx)) # End if # After the program name is the specification part - statements, mtables = parse_specification(pobj, statements[1:], - prog_name=prog_name, - logger=logger) + statements, mtables = parse_specification(pobj, statements[1:], run_env, + prog_name=prog_name) # We really cannot have tables inside a program's executable section # Just read until end statements = read_statements(pobj, statements) @@ -863,7 +859,7 @@ def parse_program(pobj, 
statements, logger=None): ######################################################################## -def parse_module(pobj, statements, logger=None): +def parse_module(pobj, statements, run_env): """Parse a Fortran MODULE and return any leftover statements and metadata tables encountered in the MODULE.""" # The first statement should be a module statement, grab the name @@ -873,13 +869,14 @@ def parse_module(pobj, statements, logger=None): # End if mod_name = pmatch.group(1) pobj.enter_region('MODULE', region_name=mod_name, nested_ok=False) - if logger is not None: + if run_env.logger and run_env.logger.isEnabledFor(logging.DEBUG): ctx = context_string(pobj, nodir=True) msg = "Parsing Fortran module, {}{}" - logger.debug(msg.format(mod_name, ctx)) + run_env.logger.debug(msg.format(mod_name, ctx)) # End if # After the module name is the specification part - statements, mtables = parse_specification(pobj, statements[1:], mod_name=mod_name, logger=logger) + statements, mtables = parse_specification(pobj, statements[1:], run_env, + mod_name=mod_name) # Look for metadata tables statements = read_statements(pobj, statements) inmodule = pobj.in_region('MODULE', region_name=mod_name) @@ -901,18 +898,18 @@ def parse_module(pobj, statements, logger=None): statements, mheader = parse_scheme_metadata(statements, pobj, mod_name, active_table, - logger) + run_env) if mheader is not None: title = mheader.table_name if title in mtables: errmsg = duplicate_header(mtables[title], mheader) raise CCPPError(errmsg) # end if - if logger is not None: + if run_env.logger and run_env.logger.isEnabledFor(logging.DEBUG): mtype = mheader.table_type ctx = mheader.start_context() msg = "Adding metadata from {}, {}{}" - logger.debug(msg.format(mtype, title, ctx)) + run_env.logger.debug(msg.format(mtype, title, ctx)) # End if mtables.append(mheader) # End if @@ -929,10 +926,11 @@ def parse_module(pobj, statements, logger=None): 
######################################################################## -def parse_fortran_file(filename, preproc_defs=None, logger=None): +def parse_fortran_file(filename, run_env): """Parse a Fortran file and return all metadata tables found.""" mtables = list() - pobj = read_file(filename, preproc_defs=preproc_defs, logger=logger) + pobj = read_file(filename, preproc_defs=run_env.preproc_defs, + logger=run_env.logger) pobj.reset_pos() curr_line, _ = pobj.curr_line() statements = line_statements(curr_line) @@ -944,12 +942,12 @@ def parse_fortran_file(filename, preproc_defs=None, logger=None): if _PROGRAM_RE.match(statement) is not None: # push statement back so parse_program can use it statements.insert(0, statement) - statements, ptables = parse_program(pobj, statements, logger=logger) + statements, ptables = parse_program(pobj, statements, run_env) mtables.extend(ptables) elif _MODULE_RE.match(statement) is not None: # push statement back so parse_module can use it statements.insert(0, statement) - statements, ptables = parse_module(pobj, statements, logger=logger) + statements, ptables = parse_module(pobj, statements, run_env) mtables.extend(ptables) # End if if (statements is not None) and (len(statements) == 0): diff --git a/scripts/framework_env.py b/scripts/framework_env.py new file mode 100644 index 00000000..8ee553f4 --- /dev/null +++ b/scripts/framework_env.py @@ -0,0 +1,385 @@ +#!/usr/bin/env python3 + +""" +Module to contain the runtime options for the CCPP Framework. +Function to parse arguments to the CCPP Framework and store them in an +object which allows various framework functions to access CCPP +Framework runtime information and parameter values. 
+""" + +# Python library imports +import argparse +import os + +_EPILOG = ''' +''' + +############################################################################### +class CCPPFrameworkEnv: +############################################################################### + """Object and methods to hold the runtime environment and parameter + options for the CCPP Framework""" + + def __init__(self, logger, ndict=None, verbose=0, clean=False, + host_files=None, scheme_files=None, suites=None, + preproc_directives=[], generate_docfiles=False, host_name='', + kind_types=[], use_error_obj=False, force_overwrite=False, + output_root=os.getcwd(), ccpp_datafile="datatable.xml"): + """Initialize a new CCPPFrameworkEnv object from the input arguments. + is a dict with the parsed command-line arguments (or a + dictionary created with the necessary arguments). + is a logger to be used by users of this object. + """ + emsg = '' + esep = '' + if ndict and ('verbose' in ndict): + self.__verbosity = ndict['verbose'] + del ndict['verbose'] + else: + self.__verbosity = verbose + # end if + if ndict and ('clean' in ndict): + self.__clean = ndict['clean'] + del ndict['clean'] + else: + self.__clean = clean + # end if + if ndict and ('host_files' in ndict): + self.__host_files = ndict['host_files'] + del ndict['host_files'] + if host_files and logger: + wmsg = "CCPPFrameworkEnv: Using ndict, ignoring 'host_files'" + logger.warning(wmsg) + # end if + elif host_files is None: + emsg += esep + "Error: 'host_files' list required" + esep = '\n' + else: + self.__host_files = host_files + # end if + if ndict and ('scheme_files' in ndict): + self.__scheme_files = ndict['scheme_files'] + del ndict['scheme_files'] + if scheme_files and logger: + wmsg = "CCPPFrameworkEnv: Using ndict, ignoring 'scheme_files'" + logger.warning(wmsg) + # end if + elif scheme_files is None: + emsg += esep + "Error: 'scheme_files' list required" + esep = '\n' + else: + self.__scheme_files = scheme_files + # end if + 
if ndict and ('suites' in ndict): + self.__suites = ndict['suites'] + del ndict['suites'] + if suites and logger: + wmsg = "CCPPFrameworkEnv: Using ndict, ignoring 'suites'" + logger.warning(wmsg) + # end if + elif suites is None: + emsg += esep + "Error: 'suites' list required" + esep = '\n' + else: + self.__suites = suites + # end if + if ndict and ('preproc_directives' in ndict): + preproc_defs = ndict['preproc_directives'] + del ndict['preproc_directives'] + else: + preproc_defs = preproc_directives + # end if + # Turn preproc_defs into a dictionary, start with a list to process + if isinstance(preproc_defs, list): + # Someone already handed us a list + preproc_list = preproc_defs + elif (not preproc_defs) or (preproc_defs == 'UNSET'): + # No preprocessor definitions + preproc_list = list() + elif ',' in preproc_defs: + # String of definitions, separated by commas + preproc_list = [x.strip() for x in preproc_defs.split(',')] + elif isinstance(preproc_defs, str): + # String of definitions, separated by spaces + preproc_list = [x.strip() for x in preproc_defs.split(' ') if x] + else: + wmsg = "Error: Bad preproc list type, '{}'" + emsg += esep + wmsg.format(type(preproc_defs)) + esep = '\n' + # end if + # Turn the list into a dictionary + self.__preproc_defs = {} + for item in preproc_list: + tokens = [x.strip() for x in item.split('=', 1)] + if len(tokens) > 2: + emsg += esep + "Error: Bad preproc def, '{}'".format(item) + esep = '\n' + else: + key = tokens[0] + if key[0:2] == '-D': + key = key[2:] + # end if + if len(tokens) > 1: + value = tokens[1] + else: + value = None + # end if + self.__preproc_defs[key] = value + # end if + # end for + if ndict and ('generate_docfiles' in ndict): + self.__generate_docfiles = ndict['generate_docfiles'] + del ndict['generate_docfiles'] + else: + self.__generate_docfiles = generate_docfiles + # end if + if ndict and ('host_name' in ndict): + self.__host_name = ndict['host_name'] + del ndict['host_name'] + else: + 
self.__host_name = host_name + # end if + self.__generate_host_cap = self.host_name != '' + self.__kind_dict = {} + if ndict and ("kind_type" in ndict): + kind_list = ndict["kind_type"] + del ndict["kind_type"] + else: + kind_list = kind_types + # end if + # Note that the command line uses repeated calls to 'kind_type' + for kind in kind_list: + kargs = [x.strip() for x in kind.strip().split('=')] + if len(kargs) != 2: + emsg += esep + emsg += "Error: '{}' is not a valid kind specification " + emsg += "(should be of the form =)" + emsg = emsg.format(kind) + esep = '\n' + else: + kind_name, kind_spec = kargs + # Do not worry about duplicates, just use last value + self.__kind_dict[kind_name] = kind_spec + # end if + # end for + # We always need a kind_phys so add a default if necessary + if "kind_phys" not in self.__kind_dict: + self.__kind_dict["kind_phys"] = "REAL64" + # end if + if ndict and ('use_error_obj' in ndict): + self.__use_error_obj = ndict['use_error_obj'] + del ndict['use_error_obj'] + else: + self.__use_error_obj = use_error_obj + # end if + if ndict and ('force_overwrite' in ndict): + self.__force_overwrite = ndict['force_overwrite'] + del ndict['force_overwrite'] + else: + self.__force_overwrite = force_overwrite + # end if + # Make sure we know where output is going + if ndict and ('output_root' in ndict): + self.__output_root = ndict['output_root'] + del ndict['output_root'] + else: + self.__output_root = output_root + # end if + self.__output_dir = os.path.abspath(self.output_root) + # Make sure we can create output database + if ndict and ('ccpp_datafile' in ndict): + self.__datatable_file = os.path.normpath(ndict['ccpp_datafile']) + del ndict['ccpp_datafile'] + else: + self.__datatable_file = ccpp_datafile + # end if + if not os.path.isabs(self.datatable_file): + self.__datatable_file = os.path.join(self.output_dir, + self.datatable_file) + # end if + self.__logger = logger + ## Check to see if anything is left in dictionary + if ndict: + for 
key in ndict: + emsg += esep + "Error: Unknown key in , '{}'".format(key) + esep = '\n' + # end for + # end if + # Raise an exception if any errors were found + if emsg: + raise ValueError(emsg) + # end if + + @property + def verbosity(self): + """Return the property for this CCPPFrameworkEnv object.""" + return self.__verbosity + + @property + def clean(self): + """Return the property for this CCPPFrameworkEnv object.""" + return self.__clean + + @property + def host_files(self): + """Return the property for this CCPPFrameworkEnv object.""" + return self.__host_files + + @property + def scheme_files(self): + """Return the property for this + CCPPFrameworkEnv object.""" + return self.__scheme_files + + @property + def suites(self): + """Return the property for this + CCPPFrameworkEnv object.""" + return self.__suites + + @property + def preproc_defs(self): + """Return the property for this + CCPPFrameworkEnv object.""" + return self.__preproc_defs + + @property + def generate_docfiles(self): + """Return the property for this + CCPPFrameworkEnv object.""" + return self.__generate_docfiles + + @property + def host_name(self): + """Return the property for this CCPPFrameworkEnv object.""" + return self.__host_name + + @property + def generate_host_cap(self): + """Return the property for this + CCPPFrameworkEnv object.""" + return self.__generate_host_cap + + def kind_spec(self, kind_type): + """Return the kind specification for kind type, + for this CCPPFrameworkEnv object. 
+ If there is no entry for , return None.""" + kind_spec = None + if kind_type in self.__kind_dict: + kind_spec = self.__kind_dict[kind_type] + # end if + return kind_spec + + def kind_types(self): + """Return a list of all kind types defined in this + CCPPFrameworkEnv object.""" + return self.__kind_dict.keys() + + @property + def use_error_obj(self): + """Return the property for this + CCPPFrameworkEnv object.""" + return self.__use_error_obj + + @property + def force_overwrite(self): + """Return the property for this + CCPPFrameworkEnv object.""" + return self.__force_overwrite + + @property + def output_root(self): + """Return the property for this +CCPPFrameworkEnv object.""" + return self.__output_root + + @property + def output_dir(self): + """Return the property for this CCPPFrameworkEnv object.""" + return self.__output_dir + + @property + def datatable_file(self): + """Return the property for this + CCPPFrameworkEnv object.""" + return self.__datatable_file + + @property + def logger(self): + """Return the property for this CCPPFrameworkEnv object.""" + return self.__logger + +############################################################################### +def parse_command_line(args, description, logger=None): +############################################################################### + """Create an ArgumentParser to parse and return a CCPPFrameworkEnv + object containing the command-line arguments and related quantities.""" + ap_format = argparse.RawTextHelpFormatter + parser = argparse.ArgumentParser(description=description, + formatter_class=ap_format, epilog=_EPILOG) + + parser.add_argument("--host-files", metavar='', + type=str, required=True, + help="""Comma separated list of host filenames to process +Filenames with a '.meta' suffix are treated as host model metadata files +Filenames with a '.txt' suffix are treated as containing a list of .meta +filenames""") + + parser.add_argument("--scheme-files", metavar='', + type=str, required=True, + 
help="""Comma separated list of scheme filenames to process +Filenames with a '.meta' suffix are treated as scheme metadata files +Filenames with a '.txt' suffix are treated as containing a list of .meta +filenames""") + + parser.add_argument("--suites", metavar='', + type=str, required=True, + help="""Comma separated list of suite definition filenames to process +Filenames with a '.xml' suffix are treated as suite definition XML files +Other filenames are treated as containing a list of .xml filenames""") + + parser.add_argument("--preproc-directives", + metavar='VARDEF1[,VARDEF2 ...]', type=str, default='', + help="Proprocessor directives used to correctly parse source files") + + parser.add_argument("--ccpp-datafile", type=str, + metavar='', + default="datatable.xml", + help="Filename for information on content generated by the CCPP Framework") + + parser.add_argument("--output-root", type=str, + metavar='', + default=os.getcwd(), + help="directory for generated files") + + parser.add_argument("--host-name", type=str, default='', + help='''Name of host model to use in CCPP API +If this option is passed, a host model cap is generated''') + + parser.add_argument("--clean", action='store_true', default=False, + help='Remove files created by this script, then exit') + + parser.add_argument("--kind-type", type=str, action='append', + metavar="kind_type", default=list(), + help="""Data size for real() data. +Entry in the form of = +e.g., --kind-type "kind_phys=REAL64" +Enter more than one --kind-type entry to define multiple CCPP kinds. 
+ SHOULD be a valid ISO_FORTRAN_ENV type""") + + parser.add_argument("--generate-docfiles", + metavar='HTML | Latex | HTML,Latex', type=str, + help="Generate LaTeX and/or HTML documentation") + + parser.add_argument("--use-error-obj", action='store_true', default=False, + help="""Host model and caps use an error object +instead of ccpp_error_message and ccpp_error_code.""") + + parser.add_argument("--force-overwrite", action='store_true', default=False, + help="""Overwrite all CCPP-generated files, even +if unmodified""") + + parser.add_argument("--verbose", action='count', default=0, + help="Log more activity, repeat for increased output") + pargs = parser.parse_args(args) + return CCPPFrameworkEnv(logger, vars(pargs)) diff --git a/scripts/host_cap.py b/scripts/host_cap.py index fcde1985..2408209d 100644 --- a/scripts/host_cap.py +++ b/scripts/host_cap.py @@ -1,18 +1,19 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 """ Parse a host-model registry XML file and return the captured variables. 
""" # Python library imports +import logging import os -import os.path # CCPP framework imports from ccpp_suite import API from ccpp_state_machine import CCPP_STATE_MACH from constituents import ConstituentVarDict, CONST_DDT_NAME, CONST_DDT_MOD from ddt_library import DDTLibrary from file_utils import KINDS_MODULE +from framework_env import CCPPFrameworkEnv from metadata_table import MetadataTable from metavar import Var, VarDictionary, CCPP_CONSTANT_VARS from metavar import CCPP_LOOP_VAR_STDNAMES @@ -36,20 +37,30 @@ _API_SOURCE = ParseSource(_API_SRC_NAME, "MODULE", ParseContext(filename="host_cap.F90")) +_API_DUMMY_RUN_ENV = CCPPFrameworkEnv(None, ndict={'host_files':'', + 'scheme_files':'', + 'suites':''}) + _SUITE_NAME_VAR = Var({'local_name':'suite_name', 'standard_name':'suite_name', 'intent':'in', 'type':'character', 'kind':'len=*', 'units':'', 'protected':'True', - 'dimensions':'()'}, _API_SOURCE) + 'dimensions':'()'}, _API_SOURCE, _API_DUMMY_RUN_ENV) _SUITE_PART_VAR = Var({'local_name':'suite_part', 'standard_name':'suite_part', 'intent':'in', 'type':'character', 'kind':'len=*', 'units':'', 'protected':'True', - 'dimensions':'()'}, _API_SOURCE) + 'dimensions':'()'}, _API_SOURCE, _API_DUMMY_RUN_ENV) + +############################################################################### +# Used for creating blank dictionary +_MVAR_DUMMY_RUN_ENV = CCPPFrameworkEnv(None, ndict={'host_files':'', + 'scheme_files':'', + 'suites':''}) # Used to prevent loop substitution lookups -_BLANK_DICT = VarDictionary(_API_SRC_NAME) +_BLANK_DICT = VarDictionary(_API_SRC_NAME, _MVAR_DUMMY_RUN_ENV) ############################################################################### def suite_part_list(suite, stage): @@ -138,7 +149,7 @@ def constituent_model_const_indices(host_model): return unique_local_name(hstr, host_model) ############################################################################### -def add_constituent_vars(cap, host_model, suite_list, logger): +def 
add_constituent_vars(cap, host_model, suite_list, run_env): ############################################################################### """Create a DDT library containing array reference variables for each constituent field for all suites in . @@ -195,9 +206,9 @@ def add_constituent_vars(cap, host_model, suite_list, logger): # Add entries for each constituent (once per standard name) const_stdnames = set() for suite in suite_list: - if logger is not None: + if run_env.logger and run_env.logger.isEnabledFor(logging.DEBUG): lmsg = "Adding constituents from {} to {}" - logger.debug(lmsg.format(suite.name, host_model.name)) + run_env.logger.debug(lmsg.format(suite.name, host_model.name)) # end if scdict = suite.constituent_dictionary() for cvar in scdict.variable_list(): @@ -254,23 +265,23 @@ def add_constituent_vars(cap, host_model, suite_list, logger): # Parse this table using a fake filename parse_obj = ParseObject("{}_constituent_mod.meta".format(host_model.name), ddt_mdata) - ddt_table = MetadataTable(parse_object=parse_obj, logger=logger) + ddt_table = MetadataTable(run_env, parse_object=parse_obj) ddt_name = ddt_table.sections()[0].title ddt_lib = DDTLibrary('{}_constituent_ddtlib'.format(host_model.name), - ddts=ddt_table.sections(), logger=logger) + run_env, ddts=ddt_table.sections()) # A bit of cleanup del parse_obj del ddt_mdata # Now, create the "host constituent module" dictionary const_dict = VarDictionary("{}_constituents".format(host_model.name), - parent_dict=host_model) + run_env, parent_dict=host_model) # Add in the constituents object prop_dict = {'standard_name' : "ccpp_model_constituents_object", 'local_name' : constituent_model_object_name(host_model), 'dimensions' : '()', 'units' : "None", 'ddt_type' : ddt_name} - const_var = Var(prop_dict, _API_SOURCE) + const_var = Var(prop_dict, _API_SOURCE, run_env) const_var.write_def(cap, 1, const_dict) - ddt_lib.collect_ddt_fields(const_dict, const_var) + ddt_lib.collect_ddt_fields(const_dict, 
const_var, run_env) # Declare variable for the constituent standard names array max_csname = max([len(x) for x in const_stdnames]) if const_stdnames else 0 num_const_fields = len(const_stdnames) @@ -297,8 +308,8 @@ def add_constituent_vars(cap, host_model, suite_list, logger): 'local_name' : ind_loc_name, 'dimensions' : '()', 'units' : 'index', 'protected' : "True", 'type' : 'integer', 'kind' : ''} - ind_var = Var(prop_dict, _API_SOURCE) - const_dict.add_variable(ind_var) + ind_var = Var(prop_dict, _API_SOURCE, run_env) + const_dict.add_variable(ind_var, run_env) # end for # Add vertical dimensions for DDT call strings pver = host_model.find_variable(standard_name=vert_layer_dim, @@ -310,8 +321,8 @@ def add_constituent_vars(cap, host_model, suite_list, logger): 'protected' : 'True', 'dimensions' : '()'} if const_dict.find_variable(standard_name=vert_layer_dim, any_scope=False) is None: - ind_var = Var(prop_dict, _API_SOURCE) - const_dict.add_variable(ind_var) + ind_var = Var(prop_dict, _API_SOURCE, _API_DUMMY_RUN_ENV) + const_dict.add_variable(ind_var, run_env) # end if # end if pver = host_model.find_variable(standard_name=vert_interface_dim, @@ -323,8 +334,8 @@ def add_constituent_vars(cap, host_model, suite_list, logger): 'protected' : 'True', 'dimensions' : '()'} if const_dict.find_variable(standard_name=vert_interface_dim, any_scope=False) is None: - ind_var = Var(prop_dict, _API_SOURCE) - const_dict.add_variable(ind_var) + ind_var = Var(prop_dict, _API_SOURCE, run_env) + const_dict.add_variable(ind_var, run_env) # end if # end if @@ -366,14 +377,14 @@ def suite_part_call_list(host_model, const_dict, suite_part, subst_loop_vars): return ', '.join(hmvars) ############################################################################### -def write_host_cap(host_model, api, output_dir, logger): +def write_host_cap(host_model, api, output_dir, run_env): ############################################################################### """Write an API to allow to call 
any configured CCPP suite""" module_name = "{}_ccpp_cap".format(host_model.name) cap_filename = os.path.join(output_dir, '{}.F90'.format(module_name)) - if logger is not None: + if run_env.logger is not None: msg = 'Writing CCPP Host Model Cap for {} to {}' - logger.info(msg.format(host_model.name, cap_filename)) + run_env.logger.info(msg.format(host_model.name, cap_filename)) # End if header = _HEADER.format(host_model=host_model.name) with FortranWriter(cap_filename, 'w', header, module_name) as cap: @@ -418,12 +429,12 @@ def write_host_cap(host_model, api, output_dir, logger): cap.write("public :: {}".format(copyout_name), 1) cap.write("", 0) cap.write("! Private module variables", 1) - const_dict = add_constituent_vars(cap, host_model, api.suites, logger) + const_dict = add_constituent_vars(cap, host_model, api.suites, run_env) cap.end_module_header() for stage in CCPP_STATE_MACH.transitions(): # Create a dict of local variables for stage host_local_vars = VarDictionary("{}_{}".format(host_model.name, - stage)) + stage), run_env) # Create part call lists # Look for any loop-variable mismatch for suite in api.suites: diff --git a/scripts/host_model.py b/scripts/host_model.py index 725acda0..cd0822a1 100644 --- a/scripts/host_model.py +++ b/scripts/host_model.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 """ Parse a host-model registry XML file and return the captured variables. @@ -17,12 +17,18 @@ class HostModel(VarDictionary): """Class to hold the data from a host model""" - def __init__(self, meta_tables, name_in, logger): + def __init__(self, meta_tables, name_in, run_env): + """Initialize this HostModel object. + is a list of parsed host metadata tables. + is the name for this host model. + is the CCPPFrameworkEnv object for this framework run. 
+ """ self.__name = name_in self.__var_locations = {} # Local name to module map self.__loop_vars = None # Loop control vars in interface calls self.__used_variables = None # Local names which have been requested self.__deferred_finds = None # Used variables that were missed at first + self.__run_env = run_env # First, process DDT headers meta_headers = list() for sect in [x.sections() for x in meta_tables.values()]: @@ -30,31 +36,31 @@ def __init__(self, meta_tables, name_in, logger): # end for # Initialize our dictionaries # Initialize variable dictionary - super(HostModel, self).__init__(self.name, logger=logger) - self.__ddt_lib = DDTLibrary('{}_ddts'.format(self.name), + super().__init__(self.name, run_env) + self.__ddt_lib = DDTLibrary('{}_ddts'.format(self.name), run_env, ddts=[d for d in meta_headers - if d.header_type == 'ddt'], - logger=logger) + if d.header_type == 'ddt']) self.__ddt_dict = VarDictionary("{}_ddt_vars".format(self.name), - parent_dict=self, logger=logger) + run_env, parent_dict=self) # Now, process the code headers by type self.__metadata_tables = meta_tables for header in [h for h in meta_headers if h.header_type != 'ddt']: title = header.title - if logger is not None: + if run_env.logger is not None: msg = 'Adding {} {} to host model' - logger.debug(msg.format(header.header_type, title)) + run_env.logger.debug(msg.format(header.header_type, title)) # End if if header.header_type == 'module': # Set the variable modules modname = header.title for var in header.variable_list(): - self.add_variable(var) + self.add_variable(var, run_env) lname = var.get_prop_value('local_name') self.__var_locations[lname] = modname self.ddt_lib.check_ddt_type(var, header, lname=lname) if var.is_ddt(): - self.ddt_lib.collect_ddt_fields(self.__ddt_dict, var) + self.ddt_lib.collect_ddt_fields(self.__ddt_dict, var, + run_env) # End if # End for elif header.header_type == 'host': @@ -63,10 +69,11 @@ def __init__(self, meta_tables, name_in, logger): self.__name = 
header.name # End if for var in header.variable_list(): - self.add_variable(var) + self.add_variable(var, run_env) self.ddt_lib.check_ddt_type(var, header) if var.is_ddt(): - self.ddt_lib.collect_ddt_fields(self.__ddt_dict, var) + self.ddt_lib.collect_ddt_fields(self.__ddt_dict, var, + run_env) # End if # End for loop_vars = header.variable_list(std_vars=False, @@ -75,12 +82,12 @@ def __init__(self, meta_tables, name_in, logger): # loop_vars are part of the host-model interface call # at run time. As such, they override the host-model # array dimensions. - self.__loop_vars = VarDictionary(self.name) + self.__loop_vars = VarDictionary(self.name, run_env) # End if for hvar in loop_vars: std_name = hvar.get_prop_value('standard_name') if std_name not in self.__loop_vars: - self.__loop_vars.add_variable(hvar) + self.__loop_vars.add_variable(hvar, run_env) else: ovar = self.__loop_vars[std_name] ctx1 = context_string(ovar.context) @@ -182,12 +189,11 @@ def find_variable(self, standard_name=None, source_var=None, """Return the host model variable matching or None If is True, substitute a begin:end range for an extent. 
""" - my_var = super(HostModel, - self).find_variable(standard_name=standard_name, - source_var=source_var, - any_scope=any_scope, clone=clone, - search_call_list=search_call_list, - loop_subst=loop_subst) + my_var = super().find_variable(standard_name=standard_name, + source_var=source_var, + any_scope=any_scope, clone=clone, + search_call_list=search_call_list, + loop_subst=loop_subst) if my_var is None: # Check our DDT library if standard_name is None: @@ -217,7 +223,7 @@ def find_variable(self, standard_name=None, source_var=None, new_var = my_var.clone(new_name, source_name=self.name, source_type="HOST", context=ctx) - self.add_variable(new_var) + self.add_variable(new_var, self.__run_env) my_var = new_var # End if # End if @@ -243,7 +249,7 @@ def find_variable(self, standard_name=None, source_var=None, # End if return my_var - def add_variable(self, newvar, exists_ok=False, gen_unique=False, + def add_variable(self, newvar, run_env, exists_ok=False, gen_unique=False, adjust_intent=False): """Add if it does not conflict with existing entries. For the host model, this includes entries in used DDT variables. 
@@ -266,9 +272,9 @@ def add_variable(self, newvar, exists_ok=False, gen_unique=False, raise CCPPError(emsg.format(standard_name, ntx, ctx)) # end if # No collision, proceed normally - super(HostModel, self).add_variable(newvar=newvar, exists_ok=exists_ok, - gen_unique=gen_unique, - adjust_intent=False) + super().add_variable(newvar=newvar, run_env=run_env, + exists_ok=exists_ok, gen_unique=gen_unique, + adjust_intent=False) def add_host_variable_module(self, local_name, module, logger=None): """Add a module name location for a host variable""" diff --git a/scripts/metadata2html.py b/scripts/metadata2html.py index 2e27ea3a..f9e8e8ab 100755 --- a/scripts/metadata2html.py +++ b/scripts/metadata2html.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 import argparse import logging diff --git a/scripts/metadata_parser.py b/scripts/metadata_parser.py index e3ac7440..8e8c2dc2 100755 --- a/scripts/metadata_parser.py +++ b/scripts/metadata_parser.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 import collections import logging diff --git a/scripts/metadata_table.py b/scripts/metadata_table.py index 777af4c5..55f97266 100644 --- a/scripts/metadata_table.py +++ b/scripts/metadata_table.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 """ There are four types of CCPP metadata tables, scheme, module, ddt, and host. A metadata file contains one or more metadata tables. 
@@ -89,9 +89,10 @@ dimensions = () intent = out [ ierr ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = error flag for error handling in CCPP type = integer + units = 1 dimensions = () intent=out @@ -102,8 +103,8 @@ table describes - ModuleName must match the name of the module whose variables the argument table describes -- for variable type definitions and module variables, the intent and - optional columns are not functional and should be omitted +- for variable type definitions and module variables, the intent keyword + is not functional and should be omitted - each argument table (and its subroutine) must accept the following two arguments for error handling (the local name can vary): [ errmsg ] standard_name = ccpp_error_message @@ -113,19 +114,18 @@ type = character kind = len=512 intent = out - optional = F [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out - optional = F """ # Python library imports import difflib +import logging import os.path import re # CCPP framework imports @@ -179,7 +179,7 @@ def _parse_config_line(line, context): ######################################################################## -def parse_metadata_file(filename, known_ddts, logger): +def parse_metadata_file(filename, known_ddts, run_env): """Parse and return list of parsed metadata tables""" # Read all lines of the file at once meta_tables = list() @@ -195,8 +195,8 @@ def parse_metadata_file(filename, known_ddts, logger): curr_line, curr_line_num = parse_obj.curr_line() while curr_line is not None: if MetadataTable.table_start(curr_line): - new_table = MetadataTable(parse_object=parse_obj, - known_ddts=known_ddts, logger=logger) + new_table = MetadataTable(run_env, parse_object=parse_obj, + known_ddts=known_ddts) ntitle = new_table.table_name if ntitle not in table_titles: 
meta_tables.append(new_table) @@ -269,9 +269,9 @@ class MetadataTable(): __table_start = re.compile(r"(?i)\s*\[\s*ccpp-table-properties\s*\]") - def __init__(self, table_name_in=None, table_type_in=None, + def __init__(self, run_env, table_name_in=None, table_type_in=None, dependencies=None, relative_path=None, known_ddts=None, - var_dict=None, module=None, parse_object=None, logger=None): + var_dict=None, module=None, parse_object=None): """Initialize a MetadataTable, either with a name, <table_name_in>, and type, <table_type_in>, or with information from a file (<parse_object>). if <parse_object> is None, <table_name_in> and <table_type_in> are 
known_ddts, logger): + def __init_from_file(self, known_ddts, run_env): """ Read the table preamble, assume the caller already figured out the first line of the header using the header_start method.""" curr_line, _ = self.__pobj.next_line() @@ -388,7 +389,8 @@ def __init_from_file(self, known_ddts, logger): emsg += "already been declared as None" self.__pobj.add_syntax_err(emsg) else: - depends = [x.strip() for x in value.split(',') if x.strip()] + depends = [x.strip() for x in value.split(',') + if x.strip()] self.__dependencies.extend(depends) # end if elif key == 'relative_path': @@ -404,9 +406,8 @@ def __init_from_file(self, known_ddts, logger): if MetadataSection.header_start(curr_line): skip_rest_of_section = False section = MetadataSection(self.table_name, self.table_type, - parse_object=self.__pobj, - known_ddts=known_ddts, - logger=logger) + run_env, parse_object=self.__pobj, + known_ddts=known_ddts) # Some table types only allow for one associated section if ((len(self.__sections) == 1) and (self.table_type in _SINGLETON_TABLE_TYPES)): @@ -474,6 +475,11 @@ def relative_path(self): """Return the relative path for the table's dependencies""" return self.__relative_path + @property + def run_env(self): + """Return this table's CCPPFrameworkEnv object""" + return self.__run_env + def __repr__(self): '''Print representation for MetadataTable objects''' return "<{} {} @ 0X{:X}>".format(self.__class__.__name__, @@ -498,7 +504,7 @@ def table_start(cls, line): class MetadataSection(ParseSource): """Class to hold all information from a metadata header - >>> MetadataSection("footable", "scheme", \ + >>> MetadataSection("footable", "scheme", _DUMMY_RUN_ENV, \ parse_object=ParseObject("foobar.txt", \ ["name = footable", "type = scheme", "module = foo", \ "[ im ]", "standard_name = horizontal_loop_extent", \ @@ -506,7 +512,7 @@ class MetadataSection(ParseSource): "units = index | type = integer", \ "dimensions = () | intent = in"])) #doctest: +ELLIPSIS 
<__main__.MetadataSection foo / footable at 0x...> - >>> MetadataSection("footable", "scheme", \ + >>> MetadataSection("footable", "scheme", _DUMMY_RUN_ENV, \ parse_object=ParseObject("foobar.txt", \ ["name = footable", "type = scheme", "module = foobar", \ "[ im ]", "standard_name = horizontal_loop_extent", \ @@ -514,7 +520,7 @@ class MetadataSection(ParseSource): "units = index | type = integer", \ "dimensions = () | intent = in"])).find_variable('horizontal_loop_extent') #doctest: +ELLIPSIS - >>> MetadataSection("footable", "scheme", \ + >>> MetadataSection("footable", "scheme", _DUMMY_RUN_ENV, \ parse_object=ParseObject("foobar.txt", \ ["name = footable", "type = scheme", "module = foobar", \ "process = microphysics", "[ im ]", \ @@ -523,7 +529,7 @@ class MetadataSection(ParseSource): "units = index | type = integer", \ "dimensions = () | intent = in"])).find_variable('horizontal_loop_extent') #doctest: +ELLIPSIS - >>> MetadataSection("footable", "scheme", \ + >>> MetadataSection("footable", "scheme", _DUMMY_RUN_ENV, \ parse_object=ParseObject("foobar.txt", \ ["name = footable", "type=scheme", "module = foo", \ "[ im ]", "standard_name = horizontal_loop_extent", \ @@ -533,7 +539,7 @@ class MetadataSection(ParseSource): " subroutine foo()"])).find_variable('horizontal_loop_extent') #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): parse_source.ParseSyntaxError: Invalid variable property syntax, 'subroutine foo()', at foobar.txt:9 - >>> MetadataSection("footable", "scheme", \ + >>> MetadataSection("footable", "scheme", _DUMMY_RUN_ENV, \ parse_object=ParseObject("foobar.txt", \ ["name = footable", "type = scheme", "module=foobar", \ "[ im ]", "standard_name = horizontal_loop_extent", \ @@ -542,7 +548,7 @@ class MetadataSection(ParseSource): "dimensions = () | intent = in", \ ""], line_start=0)).find_variable('horizontal_loop_extent').get_prop_value('local_name') 'im' - >>> MetadataSection("footable", "scheme", \ + >>> 
MetadataSection("footable", "scheme", _DUMMY_RUN_ENV, \ parse_object=ParseObject("foobar.txt", \ ["name = footable", "type = scheme" \ "[ im ]", "standard_name = horizontalloop extent", \ @@ -551,7 +557,7 @@ class MetadataSection(ParseSource): "dimensions = () | intent = in", \ ""], line_start=0)).find_variable('horizontal_loop_extent') - >>> MetadataSection("footable", "scheme", \ + >>> MetadataSection("footable", "scheme", _DUMMY_RUN_ENV, \ parse_object=ParseObject("foobar.txt", \ ["[ccpp-arg-table]", "name = foobar", "type = scheme" \ "[ im ]", "standard_name = horizontal loop extent", \ @@ -560,7 +566,7 @@ class MetadataSection(ParseSource): "dimensions = () | intent = in", \ ""], line_start=0)).find_variable('horizontal_loop_extent') - >>> MetadataSection("foobar", "scheme", \ + >>> MetadataSection("foobar", "scheme", _DUMMY_RUN_ENV, \ parse_object=ParseObject("foobar.txt", \ ["name = foobar", "module = foo" \ "[ im ]", "standard_name = horizontal loop extent", \ @@ -569,7 +575,7 @@ class MetadataSection(ParseSource): "dimensions = () | intent = in", \ ""], line_start=0)).find_variable('horizontal_loop_extent') - >>> MetadataSection("foobar", "scheme", \ + >>> MetadataSection("foobar", "scheme", _DUMMY_RUN_ENV, \ parse_object=ParseObject("foobar.txt", \ ["name = foobar", "foo = bar" \ "[ im ]", "standard_name = horizontal loop extent", \ @@ -598,10 +604,11 @@ class MetadataSection(ParseSource): __vref_start = re.compile(r"^\[\s*"+FORTRAN_SCALAR_REF+r"\s*\]$") - def __init__(self, table_name, table_type, parse_object=None, + def __init__(self, table_name, table_type, run_env, parse_object=None, title=None, type_in=None, module=None, process_type=None, - var_dict=None, known_ddts=None, logger=None): - """If is not None, initialize from the current file and + var_dict=None, known_ddts=None): + """Initialize a new MetadataSection object. + If is not None, initialize from the current file and location in . 
If is None, initialize from , <type>, <module>, and <var_dict>. Note that if <parse_object> is not None, <title>, @@ -618,6 +625,7 @@ def __init__(self, table_name, table_type, parse_object=None, self.__module_name = None self.__process_type = UNKNOWN_PROCESS_TYPE self.__section_valid = True + self.__run_env = run_env if parse_object is None: if title is not None: self.__section_title = title @@ -657,11 +665,10 @@ def __init__(self, table_name, table_type, parse_object=None, self.__process_type = process_type # end if # Initialize our ParseSource parent - super(MetadataSection, self).__init__(self.title, - self.header_type, self.__pobj) - self.__variables = VarDictionary(self.title, logger=logger) + super().__init__(self.title, self.header_type, self.__pobj) + self.__variables = VarDictionary(self.title, run_env) for var in var_dict.variable_list(): # Let this crash if no dict - self.__variables.add_variable(var) + self.__variables.add_variable(var, run_env) # end for self.__start_context = None else: @@ -669,7 +676,7 @@ def __init__(self, table_name, table_type, parse_object=None, known_ddts = list() # end if self.__start_context = ParseContext(context=self.__pobj) - self.__init_from_file(table_name, table_type, known_ddts, logger) + self.__init_from_file(table_name, table_type, known_ddts, run_env) # end if # Register this header if it is a DDT if self.header_type == 'ddt': @@ -700,7 +707,7 @@ def _default_module(self): # end if return def_mod - def __init_from_file(self, table_name, table_type, known_ddts, logger): + def __init_from_file(self, table_name, table_type, known_ddts, run_env): """ Read the section preamble, assume the caller already figured out the first line of the header using the header_start method.""" start_ctx = context_string(self.__pobj) @@ -768,9 +775,9 @@ def __init_from_file(self, table_name, table_type, known_ddts, logger): self.__pobj.add_syntax_err(mismatch) self.__section_valid = False # end if - if logger: - logger.info("Parsing {} 
{}{}".format(self.header_type, - self.title, start_ctx)) + if run_env.logger and run_env.logger.isEnabledFor(logging.INFO): + run_env.logger.info("Parsing {} {}{}".format(self.header_type, + self.title, start_ctx)) # end if if self.header_type == "ddt": known_ddts.append(self.title) @@ -780,21 +787,20 @@ def __init_from_file(self, table_name, table_type, known_ddts, logger): self.__module_name = self._default_module() # end if # Initialize our ParseSource parent - super(MetadataSection, self).__init__(self.title, - self.header_type, self.__pobj) + super().__init__(self.title, self.header_type, self.__pobj) # Read the variables valid_lines = True - self.__variables = VarDictionary(self.title, logger=logger) + self.__variables = VarDictionary(self.title, run_env) while valid_lines: newvar, curr_line = self.parse_variable(curr_line, known_ddts) valid_lines = newvar is not None if valid_lines: - if logger: + if run_env.logger and run_env.logger.isEnabledFor(logging.DEBUG): dmsg = 'Adding {} to {}' lname = newvar.get_prop_value('local_name') - logger.debug(dmsg.format(lname, self.title)) + run_env.logger.debug(dmsg.format(lname, self.title)) # end if - self.__variables.add_variable(newvar) + self.__variables.add_variable(newvar, run_env) # Check to see if we hit the end of the table valid_lines = not MetadataSection.header_start(curr_line) else: @@ -906,7 +912,7 @@ def parse_variable(self, curr_line, known_ddts): var_props['local_name'] = sub_name # end if (else just leave the local name alone) try: - newvar = Var(var_props, source=self, context=context) + newvar = Var(var_props, self, self.run_env, context=context) except CCPPError as verr: self.__pobj.add_syntax_err(verr, skip_context=True) var_ok = False @@ -1068,7 +1074,7 @@ def convert_dims_to_standard_names(self, var, logger=None, context=None): std = var.get_prop_value('local_name') ctx = context_string(context) if logger is not None: - errmsg = "WARNING: " + errmsg + errmsg = "ERROR: " + errmsg 
logger.error(errmsg.format(item, std, ctx)) dname = unique_standard_name() else: @@ -1178,7 +1184,7 @@ def write_to_file(self, filename, append=False): # end with def __repr__(self): - base = super(MetadataSection, self).__repr__() + base = super().__repr__() pind = base.find(' object ') if pind >= 0: pre = base[0:pind] @@ -1229,6 +1235,11 @@ def has_variables(self): """Convenience function for finding empty headers""" return self.__variables + @property + def run_env(self): + """Return this section's CCPPFrameworkEnv object""" + return self.__run_env + @property def valid(self): """Return True iff we did not encounter an error creating @@ -1259,4 +1270,8 @@ def is_scalar_reference(test_val): if __name__ == "__main__": import doctest + from framework_env import CCPPFrameworkEnv + _DUMMY_RUN_ENV = CCPPFrameworkEnv(None, {'host_files':'', + 'scheme_files':'', + 'suites':''}) doctest.testmod() diff --git a/scripts/metavar.py b/scripts/metavar.py index d7fb1173..574c89ae 100755 --- a/scripts/metavar.py +++ b/scripts/metavar.py @@ -1,8 +1,7 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 """ Classes and supporting code to hold all information on CCPP metadata variables -VariableProperty: Class which describes a single variable property Var: Class which holds all information on a single CCPP metadata variable VarSpec: Class to hold a standard_name description which can include dimensions VarAction: Base class for describing actions on variables @@ -12,20 +11,22 @@ """ # Python library imports -from __future__ import print_function import re from collections import OrderedDict # CCPP framework imports +from framework_env import CCPPFrameworkEnv from parse_tools import check_local_name, check_fortran_type, context_string -from parse_tools import FORTRAN_DP_RE, FORTRAN_SCALAR_REF_RE, fortran_list_match +from parse_tools import FORTRAN_SCALAR_REF_RE from parse_tools import check_units, check_dimensions, check_cf_standard_name from parse_tools import check_diagnostic_id, 
check_diagnostic_fixed from parse_tools import check_default_value, check_valid_values from parse_tools import ParseContext, ParseSource from parse_tools import ParseInternalError, ParseSyntaxError, CCPPError +from var_props import CCPP_LOOP_DIM_SUBSTS, VariableProperty, VarCompatObj +from var_props import find_horizontal_dimension, find_vertical_dimension +from var_props import standard_name_to_long_name, default_kind_val -############################################################################### -_REAL_SUBST_RE = re.compile(r"(.*\d)p(\d.*)") +############################################################################## # Dictionary of standard CCPP variables CCPP_STANDARD_VARS = { @@ -35,10 +36,10 @@ 'standard_name' : 'ccpp_constant_one', 'long_name' : "CCPP constant one", 'units' : '1', 'dimensions' : '()', 'type' : 'integer'}, - 'ccpp_error_flag' : - {'local_name' : 'errflg', 'standard_name' : 'ccpp_error_flag', + 'ccpp_error_code' : + {'local_name' : 'errflg', 'standard_name' : 'ccpp_error_code', 'long_name' : "CCPP error flag", - 'units' : 'flag', 'dimensions' : '()', 'type' : 'integer'}, + 'units' : '1', 'dimensions' : '()', 'type' : 'integer'}, 'ccpp_error_message' : {'local_name' : 'errmsg', 'standard_name' : 'ccpp_error_message', 'long_name' : "CCPP error message", @@ -85,367 +86,18 @@ 'vertical_layer_index', 'vertical_interface_index'] ############################################################################### -# Supported horizontal dimensions (should be defined in CCPP_STANDARD_VARS) -CCPP_HORIZONTAL_DIMENSIONS = ['ccpp_constant_one:horizontal_dimension', - 'ccpp_constant_one:horizontal_loop_extent', - 'horizontal_loop_begin:horizontal_loop_end', - 'horizontal_loop_extent'] - -############################################################################### -# Supported vertical dimensions (should be defined in CCPP_STANDARD_VARS) -CCPP_VERTICAL_DIMENSIONS = ['ccpp_constant_one:vertical_layer_dimension', - 
'ccpp_constant_one:vertical_interface_dimension', - 'vertical_layer_index', 'vertical_interface_index'] - -############################################################################### -# Substituions for run time dimension control -CCPP_LOOP_DIM_SUBSTS = {'ccpp_constant_one:horizontal_dimension' : - 'horizontal_loop_begin:horizontal_loop_end', - 'ccpp_constant_one:vertical_layer_dimension' : - 'vertical_layer_index', - 'ccpp_constant_one:vertical_interface_dimension' : - 'vertical_interface_index'} - -######################################################################## -def standard_name_to_long_name(prop_dict, context=None): -######################################################################## - """Translate a standard_name to its default long_name - >>> standard_name_to_long_name({'standard_name':'cloud_optical_depth_layers_from_0p55mu_to_0p99mu'}) - 'Cloud optical depth layers from 0.55mu to 0.99mu' - >>> standard_name_to_long_name({'local_name':'foo'}) #doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - CCPPError: No standard name to convert foo to long name - >>> standard_name_to_long_name({}) #doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - CCPPError: No standard name to convert to long name - >>> standard_name_to_long_name({'local_name':'foo'}, context=ParseContext(linenum=3, filename='foo.F90')) #doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - CCPPError: No standard name to convert foo to long name at foo.F90:3 - >>> standard_name_to_long_name({}, context=ParseContext(linenum=3, filename='foo.F90')) #doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - CCPPError: No standard name to convert to long name at foo.F90:3 - """ - # We assume that standar_name has been checked for validity - # Make the first char uppercase and replace each underscore with a space - if 'standard_name' in prop_dict: - standard_name = prop_dict['standard_name'] - if standard_name: - 
long_name = standard_name[0].upper() + re.sub("_", " ", - standard_name[1:]) - else: - long_name = '' - # end if - # Next, substitute a decimal point for the p in [:digit]p[:digit] - match = _REAL_SUBST_RE.match(long_name) - while match is not None: - long_name = match.group(1) + '.' + match.group(2) - match = _REAL_SUBST_RE.match(long_name) - # end while - else: - long_name = '' - if 'local_name' in prop_dict: - lname = ' {}'.format(prop_dict['local_name']) - else: - lname = '' - # end if - ctxt = context_string(context) - emsg = 'No standard name to convert{} to long name{}' - raise CCPPError(emsg.format(lname, ctxt)) - # end if - return long_name - -######################################################################## -def default_kind_val(prop_dict, context=None): -######################################################################## - """Choose a default kind based on a variable's type - >>> default_kind_val({'type':'REAL'}) - 'kind_phys' - >>> default_kind_val({'type':'complex'}) - 'kind_phys' - >>> default_kind_val({'type':'double precision'}) - 'kind_phys' - >>> default_kind_val({'type':'integer'}) - '' - >>> default_kind_val({'type':'character'}) - '' - >>> default_kind_val({'type':'logical'}) - '' - >>> default_kind_val({'local_name':'foo'}) #doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - CCPPError: No type to find default kind for foo - >>> default_kind_val({}) #doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - CCPPError: No type to find default kind - >>> default_kind_val({'local_name':'foo'}, context=ParseContext(linenum=3, filename='foo.F90')) #doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - CCPPError: No type to find default kind for foo at foo.F90:3 - >>> default_kind_val({}, context=ParseContext(linenum=3, filename='foo.F90')) #doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - CCPPError: No type to find default kind at foo.F90:3 - """ - if 'type' 
in prop_dict: - vtype = prop_dict['type'].lower() - if vtype == 'real': - kind = 'kind_phys' - elif vtype == 'complex': - kind = 'kind_phys' - elif FORTRAN_DP_RE.match(vtype) is not None: - kind = 'kind_phys' - else: - kind = '' - # end if - else: - kind = '' - if 'local_name' in prop_dict: - lname = ' {}'.format(prop_dict['local_name']) - errmsg = 'No type to find default kind for {ln}{ct}' - else: - lname = '' - errmsg = 'No type to find default kind{ct}' - # end if - ctxt = context_string(context) - raise CCPPError(errmsg.format(ln=lname, ct=ctxt)) - # end if - return kind - -######################################################################## - -class VariableProperty(object): - """Class to represent a single property of a metadata header entry - >>> VariableProperty('local_name', str) #doctest: +ELLIPSIS - <__main__.VariableProperty object at ...> - >>> VariableProperty('standard_name', str) #doctest: +ELLIPSIS - <__main__.VariableProperty object at ...> - >>> VariableProperty('long_name', str) #doctest: +ELLIPSIS - <__main__.VariableProperty object at ...> - >>> VariableProperty('units', str) #doctest: +ELLIPSIS - <__main__.VariableProperty object at ...> - >>> VariableProperty('dimensions', list) #doctest: +ELLIPSIS - <__main__.VariableProperty object at ...> - >>> VariableProperty('type', str) #doctest: +ELLIPSIS - <__main__.VariableProperty object at ...> - >>> VariableProperty('kind', str) #doctest: +ELLIPSIS - <__main__.VariableProperty object at ...> - >>> VariableProperty('state_variable', str, valid_values_in=['True', 'False', '.true.', '.false.' ], optional_in=True, default_in=False) #doctest: +ELLIPSIS - <__main__.VariableProperty object at ...> - >>> VariableProperty('intent', str, valid_values_in=['in', 'out', 'inout']) #doctest: +ELLIPSIS - <__main__.VariableProperty object at ...> - >>> VariableProperty('optional', str, valid_values_in=['True', 'False', '.true.', '.false.' 
], optional_in=True, default_in=False) #doctest: +ELLIPSIS - <__main__.VariableProperty object at ...> - >>> VariableProperty('local_name', str).name - 'local_name' - >>> VariableProperty('standard_name', str).type == str - True - >>> VariableProperty('units', str).is_match('units') - True - >>> VariableProperty('units', str).is_match('UNITS') - True - >>> VariableProperty('units', str).is_match('type') - False - >>> VariableProperty('value', int, valid_values_in=[1, 2 ]).valid_value('2') - 2 - >>> VariableProperty('value', int, valid_values_in=[1, 2 ]).valid_value('3') - - >>> VariableProperty('value', int, valid_values_in=[1, 2 ]).valid_value('3', error=True) #doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - CCPPError: Invalid value variable property, '3' - >>> VariableProperty('units', str, check_fn_in=check_units).valid_value('m s-1') - 'm s-1' - >>> VariableProperty('units', str, check_fn_in=check_units).valid_value(' ') - - >>> VariableProperty('units', str, check_fn_in=check_units).valid_value(' ', error=True) #doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - CCPPError: ' ' is not a valid unit - >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value('()') - [] - >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value('(x)') - ['x'] - >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value('x') - - >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value('(x:y)') - ['x:y'] - >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value('(w:x,y:z)') - ['w:x', 'y:z'] - >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value(['size(foo)']) - ['size(foo)'] - >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value('(w:x,x:y:z:q)', error=True) #doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - CCPPError: 'x:y:z:q' 
is an invalid dimension range - >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value('(x:3y)', error=True) #doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - CCPPError: '3y' is not a valid Fortran identifier - >>> VariableProperty('local_name', str, check_fn_in=check_local_name).valid_value('foo') - 'foo' - >>> VariableProperty('local_name', str, check_fn_in=check_local_name).valid_value('foo(bar)') - 'foo(bar)' - >>> VariableProperty('local_name', str, check_fn_in=check_local_name).valid_value('q(:,:,index_of_water_vapor_specific_humidity)') - 'q(:,:,index_of_water_vapor_specific_humidity)' - """ - - __true_vals = ['t', 'true', '.true.'] - __false_vals = ['f', 'false', '.false.'] - - def __init__(self, name_in, type_in, valid_values_in=None, - optional_in=False, default_in=None, default_fn_in=None, - check_fn_in=None, mult_entry_ok=False): - """Conduct sanity checks and initialize this variable property.""" - self._name = name_in - self._type = type_in - if self._type not in [bool, int, list, str]: - emsg = "{} has invalid VariableProperty type, '{}'" - raise CCPPError(emsg.format(name_in, type_in)) - # end if - self._valid_values = valid_values_in - self._optional = optional_in - self._default = None - self._default_fn = None - if self.optional: - if (default_in is None) and (default_fn_in is None): - emsg = 'default_in or default_fn_in is a required property for {} because it is optional' - raise CCPPError(emsg.format(name_in)) - if (default_in is not None) and (default_fn_in is not None): - emsg = 'default_in and default_fn_in cannot both be provided' - raise CCPPError(emsg) - self._default = default_in - self._default_fn = default_fn_in - elif default_in is not None: - emsg = 'default_in is not a valid property for {} because it is not optional' - raise CCPPError(emsg.format(name_in)) - elif default_in is not None: - emsg = 'default_fn_in is not a valid property for {} because it is not optional' - raise 
CCPPError(emsg.format(name_in)) - self._check_fn = check_fn_in - self._add_multiple_ok = mult_entry_ok - - @property - def name(self): - """Return the name of the property""" - return self._name - - @property - def type(self): - """Return the type of the property""" - return self._type - - @property - def has_default_func(self): - """Return True iff this variable property has a default function""" - return self._default_fn is not None - - def get_default_val(self, prop_dict, context=None): - """Return this variable property's default value or raise an - exception if there is no default value or default value function.""" - if self.has_default_func: - return self._default_fn(prop_dict, context) - # end if - if self._default is not None: - return self._default - # end if - ctxt = context_string(context) - emsg = 'No default for variable property {}{}' - raise CCPPError(emsg.format(self.name, ctxt)) - - - @property - def optional(self): - """Return True iff this variable property is optional""" - return self._optional - - @property - def add_multiple(self): - """Return True iff multiple entries of this property should be - accumulated. If False, it should either be an error or new - instances should replace the old, however, this functionality - must be implemented by the calling routine (e.g., Var)""" - return self._add_multiple_ok - - def is_match(self, test_name): - """Return True iff <test_name> is the name of this property""" - return self.name.lower() == test_name.lower() - - def valid_value(self, test_value, prop_dict=None, error=False): - """Return a valid version of <test_value> if it is valid. - If <test_value> is not valid, return None or raise an exception, - depending on the value of <error>. - If <prop_dict> is not None, it may be used in value validation. 
- """ - valid_val = None - if self.type is int: - try: - tval = int(test_value) - if self._valid_values is not None: - if tval in self._valid_values: - valid_val = tval - else: - valid_val = None # i.e. pass - else: - valid_val = tval - except CCPPError: - valid_val = None # Redundant but more expressive than pass - elif self.type is list: - if isinstance(test_value, str): - tval = fortran_list_match(test_value) - if tval and (len(tval) == 1) and (not tval[0]): - # Scalar - tval = list() - # end if - else: - tval = test_value - # end if - if isinstance(tval, list): - valid_val = tval - elif isinstance(tval, tuple): - valid_val = list(tval) - else: - valid_val = None - # end if - if (valid_val is not None) and (self._valid_values is not None): - # Special case for lists, _valid_values applies to elements - for item in valid_val: - if item not in self._valid_values: - valid_val = None - break - # end if - # end for - else: - pass - elif self.type is bool: - if isinstance(test_value, str): - if test_value.lower() in VariableProperty.__true_vals + VariableProperty.__false_vals: - valid_val = test_value.lower() in VariableProperty.__true_vals - else: - valid_val = None # i.e., pass - # end if - else: - valid_val = not not test_value # pylint: disable=unneeded-not - elif self.type is str: - if isinstance(test_value, str): - if self._valid_values is not None: - if test_value in self._valid_values: - valid_val = test_value - else: - valid_val = None # i.e., pass - else: - valid_val = test_value - # end if - # end if - # end if - # Call a check function? 
- if valid_val and (self._check_fn is not None): - valid_val = self._check_fn(valid_val, prop_dict, error) - elif (valid_val is None) and error: - emsg = "Invalid {} variable property, '{}'" - raise CCPPError(emsg.format(self.name, test_value)) - # end if - return valid_val +# Used for creating template variables +_MVAR_DUMMY_RUN_ENV = CCPPFrameworkEnv(None, ndict={'host_files':'', + 'scheme_files':'', + 'suites':''}) -############################################################################### +############################################################################## -class Var(object): +class Var: """ A class to hold a metadata or code variable. Var objects should be treated as immutable. >>> Var.get_prop('standard_name') #doctest: +ELLIPSIS - <__main__.VariableProperty object at 0x...> + <var_props.VariableProperty object at 0x...> >>> Var.get_prop('standard') >>> Var.get_prop('type').is_match('type') @@ -462,32 +114,32 @@ class Var(object): >>> Var.get_prop('dimensions').valid_value(['Bob', 'Ray']) ['Bob', 'Ray'] - >>> Var.get_prop('active') - '.true.' 
+ >>> Var.get_prop('active') #doctest: +ELLIPSIS + <var_props.VariableProperty object at 0x...> >>> Var.get_prop('active').valid_value('flag_for_aerosol_physics') 'flag_for_aerosol_physics' - >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext())).get_prop_value('long_name') + >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext()), _MVAR_DUMMY_RUN_ENV).get_prop_value('long_name') 'Hi mom' - >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext())).get_prop_value('intent') + >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext()), _MVAR_DUMMY_RUN_ENV).get_prop_value('intent') 'in' - >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext())).get_prop_value('units') - 'm/s' - >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext())).get_prop_value('units') #doctest: +IGNORE_EXCEPTION_DETAIL + >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext()), _MVAR_DUMMY_RUN_ENV).get_prop_value('units') + 'm s-1' + >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext()), _MVAR_DUMMY_RUN_ENV).get_prop_value('units') #doctest: 
+IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ParseSyntaxError: Required property, 'units', missing, in <standard input> - >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : ' ', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext())).get_prop_value('units') #doctest: +IGNORE_EXCEPTION_DETAIL + >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : ' ', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext()), _MVAR_DUMMY_RUN_ENV).get_prop_value('units') #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ParseSyntaxError: foo: ' ' is not a valid unit, in <standard input> - >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'ttype' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext())) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '()', 'ttype' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext()), _MVAR_DUMMY_RUN_ENV) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ParseSyntaxError: Invalid metadata variable property, 'ttype', in <standard input> - >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext())) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext()), _MVAR_DUMMY_RUN_ENV) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ParseSyntaxError: Required property, 'units', missing, in <standard input> - >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'inout', 'protected' : 
'.true.'}, ParseSource('vname', 'SCHEME', ParseContext())) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '()', 'type' : 'real', 'intent' : 'inout', 'protected' : '.true.'}, ParseSource('vname', 'SCHEME', ParseContext()), _MVAR_DUMMY_RUN_ENV) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ParseSyntaxError: foo is marked protected but is intent inout, at <standard input>:1 - >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'ino'}, ParseSource('vname', 'SCHEME', ParseContext())) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '()', 'type' : 'real', 'intent' : 'ino'}, ParseSource('vname', 'SCHEME', ParseContext()), _MVAR_DUMMY_RUN_ENV) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ParseSyntaxError: Invalid intent variable property, 'ino', at <standard input>:1 """ @@ -512,8 +164,6 @@ class Var(object): default_fn_in=default_kind_val), VariableProperty('state_variable', bool, optional_in=True, default_in=False), - VariableProperty('optional', bool, - optional_in=True, default_in=False), VariableProperty('protected', bool, optional_in=True, default_in=False), VariableProperty('allocatable', bool, @@ -531,7 +181,9 @@ class Var(object): valid_values_in=['timestep', 'run'], default_in='timestep'), VariableProperty('active', str, optional_in=True, - default_in='.true.')] + default_in='.true.'), + VariableProperty('polymorphic', bool, optional_in=True, + default_in='.false.')] # XXgoldyXX: v debug only __to_add = VariableProperty('valid_values', str, @@ -577,20 +229,22 @@ class Var(object): __var_propdict.update({p.name : p for p in __constituent_props}) # All constituent props are optional so no check - def __init__(self, prop_dict, source, context=None, - invalid_ok=False, logger=None, 
clone_source=None): + def __init__(self, prop_dict, source, run_env, context=None, + clone_source=None): """Initialize a new Var object. - NB: <invalid_ok>=True is dangerous because it allows creation - of a Var object with invalid properties. - In order to prevent silent failures, <invalid_ok> requires a logger - (passed through the <logger> input) in order to take effect. If <prop_dict> is really a Var object, use that object's prop_dict. If this Var object is a clone, record the original Var object for reference + <source> is a ParseSource object describing the source of this Var. + <run_env> is the CCPPFrameworkEnv object for this framework run. + <context> is a ParseContext object + <clone_source> is a Var object. If provided, it is used as the original + source of a cloned variable. """ self.__parent_var = None # for array references self.__children = list() # This Var's array references self.__clone_source = clone_source + self.__run_env = run_env if isinstance(prop_dict, Var): prop_dict = prop_dict.copy_prop_dict() # end if @@ -635,30 +289,18 @@ def __init__(self, prop_dict, source, context=None, # Make sure required properties are present for propname in self.__required_props: if propname not in prop_dict: - if invalid_ok and (logger is not None): - ctx = context_string(self.context) - logger.warning("Required property, '{}', missing{}".format(propname, ctx)) - else: - emsg = "Required property, '{}', missing" - raise ParseSyntaxError(emsg.format(propname), - context=self.context) - # end if + emsg = "Required property, '{}', missing" + raise ParseSyntaxError(emsg.format(propname), + context=self.context) # end if # end for # Check for any mismatch if ('protected' in prop_dict) and ('intent' in prop_dict): if (prop_dict['intent'].lower() != 'in') and prop_dict['protected']: - if invalid_ok and (logger is not None): - ctx = context_string(self.context) - wmsg = "{} is marked protected but is intent {}{}" - logger.warning(wmsg.format(prop_dict['local_name'], 
- prop_dict['intent'], ctx)) - else: - emsg = "{} is marked protected but is intent {}" - raise ParseSyntaxError(emsg.format(prop_dict['local_name'], - prop_dict['intent']), - context=self.context) - # end if + emsg = "{} is marked protected but is intent {}" + raise ParseSyntaxError(emsg.format(prop_dict['local_name'], + prop_dict['intent']), + context=self.context) # end if # end if # Look for any constituent properties @@ -692,75 +334,41 @@ def __init__(self, prop_dict, source, context=None, prop_dict=self._prop_dict, error=True) # end for except CCPPError as cperr: - if invalid_ok and (logger is not None): - ctx = context_string(self.context) - wmsg = "{}: {}{}" - logger.warning(wmsg.format(self._prop_dict['local_name'], - cperr, ctx)) - else: - emsg = "{}: {}" - lname = self._prop_dict['local_name'] - raise ParseSyntaxError(emsg.format(lname, cperr), - context=self.context) - # end if + lname = self._prop_dict['local_name'] + emsg = "{}: {}" + raise ParseSyntaxError(emsg.format(lname, cperr), + context=self.context) from cperr # end try - def compatible(self, other, logger=None): - """Return True, None iff <other> is compatible with self. - If not compatible, return False,<reason> where <reason> is - a string describing the incompatibility. + def compatible(self, other, run_env): + """Return a VarCompatObj object which describes the equivalence, + compatibility, or incompatibility between <self> and <other>. 
""" # We accept character(len=*) as compatible with # character(len=INTEGER_VALUE) - compat = False - reason = None stype = self.get_prop_value('type') skind = self.get_prop_value('kind') sunits = self.get_prop_value('units') sstd_name = self.get_prop_value('standard_name') + sloc_name = self.get_prop_value('local_name') + sdims = self.get_dimensions() otype = other.get_prop_value('type') okind = other.get_prop_value('kind') ounits = other.get_prop_value('units') ostd_name = other.get_prop_value('standard_name') - if stype == 'character': - kind_eq = ((skind == okind) or - (skind == 'len=*' and okind.startswith('len=')) or - (skind.startswith('len=') and okind == 'len=*')) - else: - kind_eq = skind == okind - # end if - if ((sstd_name == ostd_name) and kind_eq and - (sunits == ounits) and (stype == otype)): - compat = True - else: - logger_str = None - error_str = None - if sstd_name != ostd_name: - logger_str = "standard_name: '{}' != '{}'".format(sstd_name, - ostd_name) - reason = 'standard_name' - elif not kind_eq: - logger_str = "kind: '{}' != '{}'".format(skind, okind) - reason = 'kind' - elif sunits != ounits: - logger_str = "units: '{}' != '{}'".format(sunits, ounits) - reason = 'units' - elif stype != otype: - logger_str = "type: '{}' != '{}'".format(stype, otype) - reason = 'type' - else: - error_str = 'Why are these variables not compatible?' 
- reason = 'UNKNOWN' - # end if - if logger is not None: - if error_str is not None: - logger.error('{}'.format(error_str)) - elif logger_str is not None: - logger.info('{}'.format(logger_str)) - # end if (no else) - # end if - # end if - return compat, reason + oloc_name = other.get_prop_value('local_name') + odims = other.get_dimensions() + compat = VarCompatObj(sstd_name, stype, skind, sunits, sdims, sloc_name, + ostd_name, otype, okind, ounits, odims, oloc_name, + run_env, + v1_context=self.context, v2_context=other.context) + if (not compat) and (run_env.logger is not None): + incompat_str = compat.incompat_reason + if incompat_str is not None: + run_env.logger.info('{}'.format(incompat_str)) + # end if (no else) + # end if + return compat def adjust_intent(self, src_var): """Add an intent to this Var or adjust its existing intent. @@ -795,8 +403,8 @@ def adjust_intent(self, src_var): self._prop_dict['intent'] = sv_intent # end if - @classmethod - def get_prop(cls, name, spec_type=None): + @staticmethod + def get_prop(name, spec_type=None): """Return VariableProperty object for <name> or None""" prop = None if (spec_type is None) and (name in Var.__var_propdict): @@ -806,80 +414,6 @@ def get_prop(cls, name, spec_type=None): # end if (else prop = None) return prop - @classmethod - def is_horizontal_dimension(cls, dim_name): - """Return True if it is a recognized horizontal - dimension or index, otherwise, return False - >>> Var.is_horizontal_dimension('horizontal_loop_extent') - True - >>> Var.is_horizontal_dimension('ccpp_constant_one:horizontal_loop_extent') - True - >>> Var.is_horizontal_dimension('ccpp_constant_one:horizontal_dimension') - True - >>> Var.is_horizontal_dimension('horizontal_loop_begin:horizontal_loop_end') - True - >>> Var.is_horizontal_dimension('horizontal_loop_begin:horizontal_loop_extent') - False - >>> Var.is_horizontal_dimension('ccpp_constant_one') - False - """ - return dim_name in CCPP_HORIZONTAL_DIMENSIONS - - @classmethod - def 
is_vertical_dimension(cls, dim_name): - """Return True if it is a recognized vertical - dimension or index, otherwise, return False - >>> Var.is_vertical_dimension('ccpp_constant_one:vertical_layer_dimension') - True - >>> Var.is_vertical_dimension('ccpp_constant_one:vertical_interface_dimension') - True - >>> Var.is_vertical_dimension('vertical_layer_index') - True - >>> Var.is_vertical_dimension('vertical_interface_index') - True - >>> Var.is_vertical_dimension('ccpp_constant_one:vertical_layer_index') - False - >>> Var.is_vertical_dimension('ccpp_constant_one:vertical_interface_index') - False - >>> Var.is_vertical_dimension('horizontal_loop_extent') - False - """ - return dim_name in CCPP_VERTICAL_DIMENSIONS - - @classmethod - def find_horizontal_dimension(cls, dims): - """Return the horizontal dimension string and location in <dims> - or (None, -1). - Return form is (horizontal_dimension, index) where index is the - location of horizontal_dimension in <dims>""" - var_hdim = None - hindex = -1 - for index, dimname in enumerate(dims): - if Var.is_horizontal_dimension(dimname): - var_hdim = dimname - hindex = index - break - # end if - # end for - return (var_hdim, hindex) - - @classmethod - def find_vertical_dimension(cls, dims): - """Return the vertical dimension string and location in <dims> - or (None, -1). 
- Return form is (vertical_dimension, index) where index is the - location of vertical_dimension in <dims>""" - var_vdim = None - vindex = -1 - for index, dimname in enumerate(dims): - if Var.is_vertical_dimension(dimname): - var_vdim = dimname - vindex = index - break - # end if - # end for - return (var_vdim, vindex) - def var_properties(self): """Return an iterator for this Var's property dictionary""" return self._prop_dict.items() @@ -936,7 +470,7 @@ def clone(self, subst_dict=None, remove_intent=False, # end if psource = ParseSource(source_name, source_type, context) - return Var(cprop_dict, psource, clone_source=self) + return Var(cprop_dict, psource, self.run_env, clone_source=self) def get_prop_value(self, name): """Return the value of key, <name> if <name> is in this variable's @@ -964,24 +498,24 @@ def handle_array_ref(self): """If this Var's local_name is an array ref, add in the array reference indices to the Var's dimensions. Return the (stripped) local_name and the full dimensions. 
- >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext())).handle_array_ref() + >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '()', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext()), _MVAR_DUMMY_RUN_ENV).handle_array_ref() ('foo', []) - >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '(ccpp_constant_one:dim1)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext())).handle_array_ref() + >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '(ccpp_constant_one:dim1)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext()), _MVAR_DUMMY_RUN_ENV).handle_array_ref() ('foo', ['ccpp_constant_one:dim1']) - >>> Var({'local_name' : 'foo(:,:,bar)', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '(ccpp_constant_one:dim1,ccpp_constant_one:dim2)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext())).handle_array_ref() + >>> Var({'local_name' : 'foo(:,:,bar)', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '(ccpp_constant_one:dim1,ccpp_constant_one:dim2)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext()), _MVAR_DUMMY_RUN_ENV).handle_array_ref() ('foo', ['ccpp_constant_one:dim1', 'ccpp_constant_one:dim2', 'bar']) - >>> Var({'local_name' : 'foo(bar,:)', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '(ccpp_constant_one:dim1)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext())).handle_array_ref() + >>> Var({'local_name' : 'foo(bar,:)', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '(ccpp_constant_one:dim1)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext()), _MVAR_DUMMY_RUN_ENV).handle_array_ref() ('foo', ['bar', 'ccpp_constant_one:dim1']) - >>> Var({'local_name' : 'foo(bar)', 'standard_name' : 'hi_mom', 'units' 
: 'm/s', 'dimensions' : '(ccpp_constant_one:dim1)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext())).handle_array_ref() #doctest: +IGNORE_EXCEPTION_DETAIL + >>> Var({'local_name' : 'foo(bar)', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '(ccpp_constant_one:dim1)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext()), _MVAR_DUMMY_RUN_ENV).handle_array_ref() #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: Call dims mismatch for foo(bar), not enough colons - >>> Var({'local_name' : 'foo(:,bar,:)', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '(ccpp_constant_one:dim1)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext())).handle_array_ref() #doctest: +IGNORE_EXCEPTION_DETAIL + >>> Var({'local_name' : 'foo(:,bar,:)', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '(ccpp_constant_one:dim1)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext()), _MVAR_DUMMY_RUN_ENV).handle_array_ref() #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: Call dims mismatch for foo(:,bar,:), not enough dims - >>> Var({'local_name' : 'foo(:,:,bar)', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '(ccpp_constant_one:dim1)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext())).handle_array_ref() #doctest: +IGNORE_EXCEPTION_DETAIL + >>> Var({'local_name' : 'foo(:,:,bar)', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '(ccpp_constant_one:dim1)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext()), _MVAR_DUMMY_RUN_ENV).handle_array_ref() #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: Call dims mismatch for foo(:,:,bar), not enough dims - >>> Var({'local_name' : 'foo(:,bar)', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '(ccpp_constant_one:dim1,ccpp_constant_one:dim2)', 'type' : 'real',}, ParseSource('vname', 'HOST', 
ParseContext())).handle_array_ref() #doctest: +IGNORE_EXCEPTION_DETAIL + >>> Var({'local_name' : 'foo(:,bar)', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '(ccpp_constant_one:dim1,ccpp_constant_one:dim2)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext()), _MVAR_DUMMY_RUN_ENV).handle_array_ref() #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: Call dims mismatch for foo(:,bar), too many dims """ @@ -1221,35 +755,34 @@ def intrinsic_elements(self, check_dict=None): element_names = None raise ValueError("shouldn't happen?") # To Do, find and process named elements of DDT - else: - children = self.children() - if (not children) and check_dict: - stdname = self.get_prop_value("standard_name") - pvar = check_dict.find_variable(standard_name=stdname, - any_scope=True) - if pvar: - children = pvar.children() - # end if - # end if - if children: - element_names = list() - for child in children: - child_elements = child.intrinsic_elements() - if isinstance(child_elements, str): - child_elements = [child_elements] - # end if - if child_elements: - for elem in child_elements: - if elem: - element_names.append(elem) - # end if - # end for - # end if - # end for - else: - element_names = self.get_prop_value('standard_name') + # end if + children = self.children() + if (not children) and check_dict: + stdname = self.get_prop_value("standard_name") + pvar = check_dict.find_variable(standard_name=stdname, + any_scope=True) + if pvar: + children = pvar.children() # end if # end if + if children: + element_names = list() + for child in children: + child_elements = child.intrinsic_elements() + if isinstance(child_elements, str): + child_elements = [child_elements] + # end if + if child_elements: + for elem in child_elements: + if elem: + element_names.append(elem) + # end if + # end for + # end if + # end for + else: + element_names = self.get_prop_value('standard_name') + # end if return element_names @classmethod @@ 
-1325,6 +858,11 @@ def host_interface_var(self): """True iff self is included in the host model interface calls""" return self.source.type == 'host' + @property + def run_env(self): + """Return the CCPPFrameworkEnv object used to create this Var object.""" + return self.__run_env + def get_dimensions(self): """Return a list with the variable's dimension strings""" dims = self.valid_value('dimensions') @@ -1362,7 +900,7 @@ def has_horizontal_dimension(self, dims=None): else: vdims = dims # end if - return Var.find_horizontal_dimension(vdims)[0] + return find_horizontal_dimension(vdims)[0] def has_vertical_dimension(self, dims=None): """Return vertical dimension standard name string for @@ -1373,7 +911,7 @@ def has_vertical_dimension(self, dims=None): else: vdims = dims # end if - return Var.find_vertical_dimension(vdims)[0] + return find_vertical_dimension(vdims)[0] def write_def(self, outfile, indent, wdict, allocatable=False, dummy=False, add_intent=None, extra_space=0): @@ -1409,6 +947,7 @@ def write_def(self, outfile, indent, wdict, allocatable=False, dimstr = '' # end if protected = self.get_prop_value('protected') + polymorphic = self.get_prop_value('polymorphic') if dummy: intent = self.get_prop_value('intent') else: @@ -1429,7 +968,7 @@ def write_def(self, outfile, indent, wdict, allocatable=False, if protected and dummy: intent_str = 'intent(in) ' elif allocatable: - if dimstr: + if dimstr or polymorphic: intent_str = 'allocatable ' else: intent_str = ' '*13 @@ -1453,8 +992,13 @@ def write_def(self, outfile, indent, wdict, allocatable=False, comma = ' ' # end if if self.is_ddt(): - dstr = "type({kind}){cspc}{intent} :: {name}{dims} ! {sname}" - cspc = comma + ' '*(extra_space + 13 - len(kind)) + if polymorphic: + dstr = "class({kind}){cspc}{intent} :: {name}{dims} ! {sname}" + cspc = comma + ' '*(extra_space + 12 - len(kind)) + else: + dstr = "type({kind}){cspc}{intent} :: {name}{dims} ! 
{sname}" + cspc = comma + ' '*(extra_space + 13 - len(kind)) + # end if else: if kind: dstr = "{type}({kind}){cspc}{intent} :: {name}{dims} ! {sname}" @@ -1482,7 +1026,7 @@ def __str__(self): def __repr__(self): """Object representation for Var objects""" - base = super(Var, self).__repr__() + base = super().__repr__() pind = base.find(' object ') if pind >= 0: pre = base[0:pind] @@ -1500,7 +1044,39 @@ def __repr__(self): ############################################################################### -class VarSpec(object): +class FortranVar(Var): + """A class to hold the metadata for a Fortran variable which can + contain properties not used in CCPP metadata. + """ + + __fortran_props = [VariableProperty('optional', bool, + optional_in=True, default_in=False)] + + def __init__(self, prop_dict, source, run_env, context=None, + clone_source=None): + """Initialize a FortranVar object. + """ + + # Remove and save any Fortran-only properties + save_dict = {} + for prop in self.__fortran_props: + if prop.name in prop_dict: + save_dict[prop.name] = prop_dict[prop.name] + del prop_dict[prop.name] + # end if + # end for + # Initialize Var + super().__init__(prop_dict, source, run_env, context=context, + clone_source=clone_source) + # Now, restore the saved properties + for prop in save_dict: + self._prop_dict[prop] = save_dict[prop] + # end for + + +############################################################################### + +class VarSpec: """A class to hold a standard_name description of a variable. A scalar variable is just a standard name while an array also contains a comma-separated list of dimension standard names in parentheses. 
@@ -1539,7 +1115,8 @@ def __repr__(self): ############################################################################### -def ccpp_standard_var(std_name, source_type, context=None, intent='out'): +def ccpp_standard_var(std_name, source_type, run_env, + context=None, intent='out'): """If <std_name> is a CCPP standard variable name, return a variable with that name. Otherwise return None. @@ -1556,7 +1133,7 @@ def ccpp_standard_var(std_name, source_type, context=None, intent='out'): if source_type.lower() == 'scheme': vdict['intent'] = intent # end if - newvar = Var(vdict, psource) + newvar = Var(vdict, psource, run_env) else: newvar = None # end if @@ -1564,7 +1141,7 @@ def ccpp_standard_var(std_name, source_type, context=None, intent='out'): ############################################################################### -class VarAction(object): +class VarAction: """A base class for variable actions such as loop substitutions or temporary variable handling.""" @@ -1633,7 +1210,7 @@ def __init__(self, missing_stdname, required_stdnames, # end try # end if self._set_action = set_action - super(VarLoopSubst, self).__init__() + super().__init__() def has_subst(self, vadict, any_scope=False): """Determine if variables for the required standard names of this @@ -1653,7 +1230,7 @@ def has_subst(self, vadict, any_scope=False): # end for return subst_list - def add_local(self, vadict, source): + def add_local(self, vadict, source, run_env): """Add a Var created from the missing name to <vadict>""" if self.missing_stdname not in vadict: lname = self._local_name @@ -1661,15 +1238,15 @@ def add_local(self, vadict, source): prop_dict = {'standard_name':self.missing_stdname, 'local_name':local_name, 'type':'integer', 'units':'count', 'dimensions':'()'} - var = Var(prop_dict, source) - vadict.add_variable(var, exists_ok=True, gen_unique=True) + var = Var(prop_dict, source, run_env) + vadict.add_variable(var, run_env, exists_ok=True, gen_unique=True) # end if def equiv(self, 
vmatch): """Return True iff <vmatch> is equivalent to <self>. Equivalence is determined by matching the missing standard name and the required standard names""" - is_equiv = super(VarLoopSubst, self).equiv(vmatch) + is_equiv = super().equiv(vmatch) if is_equiv: is_equiv = vmatch.missing_stdname == self.missing_stdname # end if @@ -1779,45 +1356,46 @@ class VarDictionary(OrderedDict): The dictionary is organized by standard_name. It is an error to try to add a variable if its standard name is already in the dictionary. Scoping is a tree of VarDictionary objects. - >>> VarDictionary('foo') + >>> VarDictionary('foo', _MVAR_DUMMY_RUN_ENV) VarDictionary(foo) - >>> VarDictionary('bar', variables={}) + >>> VarDictionary('bar', _MVAR_DUMMY_RUN_ENV, variables={}) VarDictionary(bar) - >>> VarDictionary('baz', Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()))) #doctest: +ELLIPSIS + >>> VarDictionary('baz', _MVAR_DUMMY_RUN_ENV, variables=Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()), _MVAR_DUMMY_RUN_ENV)) #doctest: +ELLIPSIS VarDictionary(baz, [('hi_mom', <__main__.Var hi_mom: foo at 0x...>)]) - >>> print("{}".format(VarDictionary('baz', Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()))))) + >>> print("{}".format(VarDictionary('baz', _MVAR_DUMMY_RUN_ENV, variables=Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()), _MVAR_DUMMY_RUN_ENV)))) VarDictionary(baz, ['hi_mom']) - >>> VarDictionary('qux', [Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 
'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()))]) #doctest: +ELLIPSIS + >>> VarDictionary('qux', _MVAR_DUMMY_RUN_ENV, variables=[Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()), _MVAR_DUMMY_RUN_ENV)]) #doctest: +ELLIPSIS VarDictionary(qux, [('hi_mom', <__main__.Var hi_mom: foo at 0x...>)]) - >>> VarDictionary('boo').add_variable(Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()))) + >>> VarDictionary('boo', _MVAR_DUMMY_RUN_ENV).add_variable(Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()), _MVAR_DUMMY_RUN_ENV), _MVAR_DUMMY_RUN_ENV) - >>> VarDictionary('who', variables=[Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()))]).prop_list('local_name') + >>> VarDictionary('who', _MVAR_DUMMY_RUN_ENV, variables=[Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()), _MVAR_DUMMY_RUN_ENV)]).prop_list('local_name') ['foo'] - >>> VarDictionary('who', variables=[Var({'local_name' : 'who_var1', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext())),Var({'local_name' : 'who_var', 'standard_name' : 'bye_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()))]).new_internal_variable_name() + >>> VarDictionary('who', _MVAR_DUMMY_RUN_ENV, 
variables=[Var({'local_name' : 'who_var1', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()), _MVAR_DUMMY_RUN_ENV),Var({'local_name' : 'who_var', 'standard_name' : 'bye_mom', 'units' : 'm s-1', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()), _MVAR_DUMMY_RUN_ENV)]).new_internal_variable_name() 'who_var2' - >>> VarDictionary('who', variables=[Var({'local_name' : 'who_var1', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()))]).new_internal_variable_name(prefix='bar') + >>> VarDictionary('who', _MVAR_DUMMY_RUN_ENV, variables=[Var({'local_name' : 'who_var1', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()), _MVAR_DUMMY_RUN_ENV)]).new_internal_variable_name(prefix='bar') 'bar' - >>> VarDictionary('glitch', Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()))).add_variable(Var({'local_name' : 'bar', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname2', 'DDT', ParseContext()))) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> VarDictionary('glitch', _MVAR_DUMMY_RUN_ENV, variables=Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()), _MVAR_DUMMY_RUN_ENV)).add_variable(Var({'local_name' : 'bar', 'standard_name' : 'hi_mom', 'units' : 'm s-1', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname2', 'DDT', ParseContext()), _MVAR_DUMMY_RUN_ENV), _MVAR_DUMMY_RUN_ENV) #doctest: +IGNORE_EXCEPTION_DETAIL 
Traceback (most recent call last): ParseSyntaxError: Invalid Duplicate standard name, 'hi_mom', at <standard input>: """ - def __init__(self, name, variables=None, parent_dict=None, logger=None): + def __init__(self, name, run_env, variables=None, + parent_dict=None): """Unlike dict, VarDictionary only takes a Var or Var list""" - super(VarDictionary, self).__init__() - self._name = name - self._logger = logger - self._parent_dict = parent_dict + super().__init__() + self.__name = name + self.__run_env = run_env + self.__parent_dict = parent_dict if parent_dict is not None: parent_dict.add_sub_scope(self) # end if - self._sub_dicts = list() - self._local_names = {} # local names in use + self.__sub_dicts = list() + self.__local_names = {} # local names in use if isinstance(variables, Var): - self.add_variable(variables) + self.add_variable(variables, run_env) elif isinstance(variables, list): for var in variables: - self.add_variable(var) + self.add_variable(var, run_env) # end for elif isinstance(variables, VarDictionary): for stdname in variables.keys(): @@ -1837,12 +1415,12 @@ def __init__(self, name, variables=None, parent_dict=None, logger=None): @property def name(self): """Return this dictionary's name""" - return self._name + return self.__name @property def parent(self): """Return the parent dictionary of this dictionary""" - return self._parent_dict + return self.__parent_dict @staticmethod def include_var_in_list(var, std_vars, loop_vars, consts): @@ -1864,11 +1442,11 @@ def include_var_in_list(var, std_vars, loop_vars, consts): def variable_list(self, recursive=False, std_vars=True, loop_vars=True, consts=True): """Return a list of all variables""" - if recursive and (self._parent_dict is not None): - vlist = self._parent_dict.variable_list(recursive=recursive, - std_vars=std_vars, - loop_vars=loop_vars, - consts=consts) + if recursive and (self.__parent_dict is not None): + vlist = self.__parent_dict.variable_list(recursive=recursive, + 
std_vars=std_vars, + loop_vars=loop_vars, + consts=consts) else: vlist = list() # end if @@ -1881,7 +1459,7 @@ def variable_list(self, recursive=False, # end for return vlist - def add_variable(self, newvar, exists_ok=False, gen_unique=False, + def add_variable(self, newvar, run_env, exists_ok=False, gen_unique=False, adjust_intent=False): """Add <newvar> if it does not conflict with existing entries If <exists_ok> is True, attempting to add an identical copy is okay. @@ -1892,10 +1470,10 @@ def add_variable(self, newvar, exists_ok=False, gen_unique=False, cvar = self.find_variable(standard_name=standard_name, any_scope=False) if (standard_name in self) and (not exists_ok): # We already have a matching variable, error! - if self._logger is not None: + if self.__run_env.logger is not None: emsg = "Attempt to add duplicate variable, {} from {}" - self._logger.error(emsg.format(standard_name, - newvar.source.name)) + self.__run_env.logger.error(emsg.format(standard_name, + newvar.source.name)) # end if emsg = "(duplicate) standard name in {}" if cvar is not None: @@ -1905,7 +1483,7 @@ def add_variable(self, newvar, exists_ok=False, gen_unique=False, token=standard_name, context=newvar.context) # end if if cvar is not None: - compat, reason = cvar.compatible(newvar, logger=self._logger) + compat = cvar.compatible(newvar, run_env) if compat: # Check for intent mismatch vintent = cvar.get_prop_value('intent') @@ -1932,17 +1510,18 @@ def add_variable(self, newvar, exists_ok=False, gen_unique=False, # end if # end if else: - if self._logger is not None: + if self.__run_env.logger is not None: emsg = "Attempt to add incompatible variable, {} from {}" - emsg += "\n{}".format(reason) - self._logger.error(emsg.format(standard_name, - newvar.source.name)) + emsg += "\n{}".format(compat.incompat_reason) + self.__run_env.logger.error(emsg.format(standard_name, + newvar.source.name)) # end if nlname = newvar.get_prop_value('local_name') clname = cvar.get_prop_value('local_name') 
cstr = context_string(cvar.context, with_comma=True) errstr = "new variable, {}, incompatible {} between {}{} and" - raise ParseSyntaxError(errstr.format(nlname, reason, + raise ParseSyntaxError(errstr.format(nlname, + compat.incompat_reason, clname, cstr), token=standard_name, context=newvar.context) @@ -1975,8 +1554,8 @@ def add_variable(self, newvar, exists_ok=False, gen_unique=False, self[standard_name] = newvar # end if lname = lname.lower() - if lname not in self._local_names: - self._local_names[lname] = standard_name + if lname not in self.__local_names: + self.__local_names[lname] = standard_name # end if def remove_variable(self, standard_name): @@ -2013,10 +1592,11 @@ def add_variable_dimensions(self, var, ignore_sources, to_dict=None, dvar = self.find_variable(standard_name=dimname, any_scope=True) if dvar and (dvar.source.type not in ignore_sources): if to_dict: - to_dict.add_variable(dvar, exists_ok=True, + to_dict.add_variable(dvar, self.__run_env, + exists_ok=True, adjust_intent=adjust_intent) else: - self.add_variable(dvar, exists_ok=True, + self.add_variable(dvar, self.__run_env, exists_ok=True, adjust_intent=adjust_intent) # end if else: @@ -2072,14 +1652,14 @@ def find_variable(self, standard_name=None, source_var=None, var = CCPP_CONSTANT_VARS[standard_name] elif standard_name in self: var = self[standard_name] - elif any_scope and (self._parent_dict is not None): + elif any_scope and (self.__parent_dict is not None): src_clist = search_call_list - var = self._parent_dict.find_variable(standard_name=standard_name, - source_var=source_var, - any_scope=any_scope, - clone=clone, - search_call_list=src_clist, - loop_subst=loop_subst) + var = self.__parent_dict.find_variable(standard_name=standard_name, + source_var=source_var, + any_scope=any_scope, + clone=clone, + search_call_list=src_clist, + loop_subst=loop_subst) else: var = None # end if @@ -2095,8 +1675,8 @@ def find_local_name(self, local_name, any_scope=False): or return None if no such 
variable is currently in the dictionary""" pvar = None lname = local_name.lower() # Case is insensitive for local names - if lname in self._local_names: - stdname = self._local_names[lname] + if lname in self.__local_names: + stdname = self.__local_names[lname] pvar = self.find_variable(standard_name=stdname, any_scope=False) if not pvar: emsg = 'VarDictionary {} should have standard_name, {}, ' @@ -2104,9 +1684,9 @@ def find_local_name(self, local_name, any_scope=False): raise ParseInternalError(emsg.format(self.name, stdname, local_name)) # end if (no else, pvar is fine) - elif any_scope and (self._parent_dict is not None): - pvar = self._parent_dict.find_local_name(local_name, - any_scope=any_scope) + elif any_scope and (self.__parent_dict is not None): + pvar = self.__parent_dict.find_local_name(local_name, + any_scope=any_scope) # end if return pvar @@ -2119,7 +1699,7 @@ def find_error_variables(self, any_scope=False, clone_as_out=False): """ err_vars = list() # Look for the combo of errflg and errmsg - errflg = self.find_variable(standard_name="ccpp_error_flag", + errflg = self.find_variable(standard_name="ccpp_error_code", any_scope=any_scope) errmsg = self.find_variable(standard_name="ccpp_error_message", any_scope=any_scope) @@ -2145,11 +1725,11 @@ def find_error_variables(self, any_scope=False, clone_as_out=False): def add_sub_scope(self, sub_dict): """Add a child dictionary to enable traversal""" - self._sub_dicts.append(sub_dict) + self.__sub_dicts.append(sub_dict) def sub_dictionaries(self): """Return a list of this dictionary's sub-dictionaries""" - return list(self._sub_dicts) + return list(self.__sub_dicts) def prop_list(self, prop_name, std_vars=True, loop_vars=True, consts=True): """Return a list of the <prop_name> property for each variable. 
@@ -2177,10 +1757,10 @@ def declare_variables(self, outfile, indent, dummy=False, # end if # end for - def merge(self, other_dict): + def merge(self, other_dict, run_env): """Add new entries from <other_dict>""" for ovar in other_dict.variable_list(): - self.add_variable(ovar) + self.add_variable(ovar, run_env) # end for @staticmethod @@ -2207,7 +1787,7 @@ def __str__(self): def __repr__(self): """Return an unique representation for this object""" - srepr = super(VarDictionary, self).__repr__() + srepr = super().__repr__() vstart = len("VarDictionary") + 1 if len(srepr) > vstart + 1: comma = ", " @@ -2296,7 +1876,7 @@ def find_loop_subst(self, standard_name, any_scope=True, context=None): if dict_var is not None: var_one = CCPP_CONSTANT_VARS['ccpp_constant_one'] my_var = (var_one, dict_var) - if self._logger is not None: + if self.__run_env.logger is not None: lstr = "loop_subst: found {}{}" logger_str = lstr.format(standard_name, context_string(context)) @@ -2307,7 +1887,7 @@ def find_loop_subst(self, standard_name, any_scope=True, context=None): for x in loop_var] if None not in my_vars: my_var = tuple(my_vars) - if self._logger is not None: + if self.__run_env.logger is not None: names = [x.get_prop_value('local_name') for x in my_vars] lstr = "loop_subst: {} ==> ({}){}" @@ -2316,7 +1896,7 @@ def find_loop_subst(self, standard_name, any_scope=True, context=None): context_string(context)) # end if else: - if self._logger is not None: + if self.__run_env.logger is not None: lstr = "loop_subst: {} ==> (??) 
FAILED{}" logger_str = lstr.format(standard_name, context_string(context)) @@ -2325,7 +1905,7 @@ def find_loop_subst(self, standard_name, any_scope=True, context=None): # end if # end if else: - if self._logger is not None: + if self.__run_env.logger is not None: lstr = "loop_subst: {} is not a loop variable{}" logger_str = lstr.format(standard_name, context_string(context)) @@ -2333,7 +1913,7 @@ def find_loop_subst(self, standard_name, any_scope=True, context=None): my_var = None # end if if logger_str is not None: - self._logger.debug(logger_str) + self.__run_env.logger.debug(logger_str) # end if return my_var @@ -2357,7 +1937,7 @@ def new_internal_variable_name(self, prefix=None, max_len=63): else: var_prefix = '{}'.format(prefix) # end if - varlist = [x for x in self._local_names.keys() if var_prefix in x] + varlist = [x for x in self.__local_names.keys() if var_prefix in x] newvar = None while newvar is None: if index == 0: @@ -2378,9 +1958,10 @@ def new_internal_variable_name(self, prefix=None, max_len=63): ############################################################################### # List of constant variables which are universally available -CCPP_CONSTANT_VARS = VarDictionary('CCPP_CONSTANT_VARS', - [ccpp_standard_var('ccpp_constant_one', - 'module')]) +CCPP_CONSTANT_VARS = \ + VarDictionary('CCPP_CONSTANT_VARS', _MVAR_DUMMY_RUN_ENV, + variables=[ccpp_standard_var('ccpp_constant_one', 'module', + _MVAR_DUMMY_RUN_ENV)]) ############################################################################### if __name__ == "__main__": diff --git a/scripts/mkcap.py b/scripts/mkcap.py index 06e59d37..d0c63a99 100755 --- a/scripts/mkcap.py +++ b/scripts/mkcap.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # # Script to generate a cap module and subroutines # from a scheme xml file. 
diff --git a/scripts/mkdoc.py b/scripts/mkdoc.py index 0f55f2aa..39a2ea83 100755 --- a/scripts/mkdoc.py +++ b/scripts/mkdoc.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # # Functions to generate basic documentation in HTML and LaTeX for CCPP metadata diff --git a/scripts/mkstatic.py b/scripts/mkstatic.py index 88485803..8f04d220 100755 --- a/scripts/mkstatic.py +++ b/scripts/mkstatic.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # import collections diff --git a/scripts/parse_tools/__init__.py b/scripts/parse_tools/__init__.py index 5c566c45..03dd0429 100644 --- a/scripts/parse_tools/__init__.py +++ b/scripts/parse_tools/__init__.py @@ -29,6 +29,7 @@ from preprocess import PreprocStack from xml_tools import find_schema_file, find_schema_version from xml_tools import read_xml_file, validate_xml_file +from xml_tools import PrettyElementTree # pylint: enable=wrong-import-position __all__ = [ @@ -61,6 +62,7 @@ 'ParseSyntaxError', 'ParseObject', 'PreprocStack', + 'PrettyElementTree', 'register_fortran_ddt_name', 'read_xml_file', 'registered_fortran_ddt_name', diff --git a/scripts/parse_tools/parse_checkers.py b/scripts/parse_tools/parse_checkers.py index 351b1381..a3b4f185 100755 --- a/scripts/parse_tools/parse_checkers.py +++ b/scripts/parse_tools/parse_checkers.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 """Helper functions to validate parsed input""" @@ -12,11 +12,13 @@ ######################################################################## +_UNITS_RE = re.compile(r"^[^/@#$%^&*()\|<>\[\]{}?,.]+$") + def check_units(test_val, prop_dict, error): """Return <test_val> if a valid unit, otherwise, None if <error> is True, raise an Exception if <test_val> is not valid. 
- >>> check_units('m/s', None, True) - 'm/s' + >>> check_units('m s-1', None, True) + 'm s-1' >>> check_units('kg m-3', None, True) 'kg m-3' >>> check_units('1', None, True) @@ -33,20 +35,20 @@ def check_units(test_val, prop_dict, error): Traceback (most recent call last): CCPPError: ['foo'] is invalid; not a string """ - if not isinstance(test_val, str): - if error: - raise CCPPError("'{}' is invalid; not a string".format(test_val)) - else: - test_val = None - # end if - else: - if not test_val.strip(): + if isinstance(test_val, str): + if _UNITS_RE.match(test_val.strip()) is None: if error: raise CCPPError("'{}' is not a valid unit".format(test_val)) else: test_val = None # end if # end if + else: + if error: + raise CCPPError("'{}' is invalid; not a string".format(test_val)) + else: + test_val = None + # end if # end if return test_val @@ -206,8 +208,14 @@ def check_cf_standard_name(test_val, prop_dict, error): __FORT_DIM = r"(?:"+__FORTRAN_AID+r"|[:]|"+__FORT_INT+r")" __REPEAT_DIM = r"(?:,\s*"+__FORT_DIM+r"\s*)" __FORTRAN_SCALAR_ARREF = r"[(]\s*("+__FORT_DIM+r"\s*"+__REPEAT_DIM+r"{0,6})[)]" +# FORTRAN_SCALAR_REF: Pattern of a valid Fortran array reference +# NB: Only allows symbols, no expressions and/or function calls FORTRAN_SCALAR_REF = r"(?:"+FORTRAN_ID+r"\s*"+__FORTRAN_SCALAR_ARREF+r")" FORTRAN_SCALAR_REF_RE = re.compile(FORTRAN_SCALAR_REF+r"$") +# FORTRAN_FUNCTION_REF: A Fortran function reference +# NB: Currently does not support function arguments +FORTRAN_FUNCTION_REF = r"(?:"+FORTRAN_ID+r"\s*[(]\s*[)])" +FORTRAN_FUNCTION_REF_RE = re.compile(FORTRAN_FUNCTION_REF) FORTRAN_INTRINSIC_TYPES = ["integer", "real", "logical", "complex", "double precision", "character"] FORTRAN_DP_RE = re.compile(r"(?i)double\s*precision") diff --git a/scripts/parse_tools/parse_log.py b/scripts/parse_tools/parse_log.py index ec96e4a1..e6561427 100644 --- a/scripts/parse_tools/parse_log.py +++ b/scripts/parse_tools/parse_log.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python
+#!/usr/bin/env python3 """Shared logger for parse processes""" diff --git a/scripts/parse_tools/parse_object.py b/scripts/parse_tools/parse_object.py index 6f13dd64..bf489de8 100644 --- a/scripts/parse_tools/parse_object.py +++ b/scripts/parse_tools/parse_object.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 """A module for the base, ParseObject class""" # CCPP framework imports diff --git a/scripts/parse_tools/parse_source.py b/scripts/parse_tools/parse_source.py index dfcb0432..dd57b4ec 100644 --- a/scripts/parse_tools/parse_source.py +++ b/scripts/parse_tools/parse_source.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 """Classes to aid the parsing process""" diff --git a/scripts/parse_tools/preprocess.py b/scripts/parse_tools/preprocess.py index b2ef87f1..96a52089 100755 --- a/scripts/parse_tools/preprocess.py +++ b/scripts/parse_tools/preprocess.py @@ -1,4 +1,4 @@ -#! /usr/bin/env python +#! /usr/bin/env python3 """ Classes to parse C preprocessor lines and to maintain a stack to allow inclusion and exclusion of lines based on preprocessor symbol definitions. diff --git a/scripts/parse_tools/xml_tools.py b/scripts/parse_tools/xml_tools.py index a9c207e5..46156c64 100644 --- a/scripts/parse_tools/xml_tools.py +++ b/scripts/parse_tools/xml_tools.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 """ Parse a host-model registry XML file and return the captured variables. 
@@ -8,6 +8,7 @@ from __future__ import print_function import os import os.path +import re import subprocess import sys import xml.etree.ElementTree as ET @@ -24,6 +25,12 @@ from parse_log import init_log, set_log_to_null # pylint: enable=wrong-import-position +# Global data +_INDENT_STR = " " +beg_tag_re = re.compile(r"([<][^/][^<>]*[^/][>])") +end_tag_re = re.compile(r"([<][/][^<>/]+[>])") +simple_tag_re = re.compile(r"([<][^/][^<>/]+[/][>])") + # Find python version PY3 = sys.version_info[0] > 2 PYSUBVER = sys.version_info[1] @@ -236,6 +243,114 @@ def read_xml_file(filename, logger=None): ############################################################################### +class PrettyElementTree(ET.ElementTree): + """An ElementTree subclass with nice formatting when writing to a file""" + + def __init__(self, element=None, file=None): + """Initialize a PrettyElementTree object""" + super(PrettyElementTree, self).__init__(element, file) + + def _write(self, outfile, line, indent, eol=os.linesep): + """Write <line> as an ASCII string to <outfile>""" + outfile.write('{}{}{}'.format(_INDENT_STR*indent, line, eol)) + + @staticmethod + def _inc_pos(outstr, text, txt_beg): + """Return a position increment based on the length of <outstr> + or raise an exception if <outstr> is empty. 
+ <text> and <txt_beg> are used to provide some context for the error.""" + if outstr: + return len(outstr) + # end if + txt_end = text[txt_beg].find(">") + txt_beg + 1 + if txt_end <= txt_beg: + txt_end = txt_beg + 256 + # end if + emsg = "No output at {} of {}\n{}".format(txt_beg, len(text), + text[txt_beg:txt_end]) + raise DatatableInternalError(emsg) + + def write(self, file, encoding="us-ascii", xml_declaration=None, + default_namespace=None, method="xml", + short_empty_elements=True): + """Subclassed write method to format output.""" + if PY3 and (PYSUBVER >= 4): + if PYSUBVER >= 8: + input = ET.tostring(self.getroot(), + encoding=encoding, method=method, + xml_declaration=xml_declaration, + default_namespace=default_namespace, + short_empty_elements=short_empty_elements) + else: + input = ET.tostring(self.getroot(), + encoding=encoding, method=method, + short_empty_elements=short_empty_elements) + # end if + else: + input = ET.tostring(self.getroot(), + encoding=encoding, method=method) + # end if + if PY3: + fmode = 'wt' + root = str(input, encoding="utf-8") + else: + fmode = 'w' + root = input + # end if + indent = 0 + last_write_text = False + with open(file, fmode) as outfile: + inline = root.strip() + istart = 0 # Current start pos + iend = len(inline) + while istart < iend: + bmatch = beg_tag_re.match(inline[istart:]) + ematch = end_tag_re.match(inline[istart:]) + smatch = simple_tag_re.match(inline[istart:]) + if bmatch is not None: + outstr = bmatch.group(1) + if inline[istart + len(bmatch.group(1))] != '<': + # Print text on same line + self._write(outfile, outstr, indent, eol='') + else: + self._write(outfile, outstr, indent) + # end if + indent += 1 + istart += self._inc_pos(outstr, inline, istart) + last_write_text = False + elif ematch is not None: + outstr = ematch.group(1) + indent -= 1 + if last_write_text: + self._write(outfile, outstr, 0) + last_write_text = False + else: + self._write(outfile, outstr, indent) + # end if + istart += 
self._inc_pos(outstr, inline, istart) + elif smatch is not None: + outstr = smatch.group(1) + self._write(outfile, outstr, indent) + istart += self._inc_pos(outstr, inline, istart) + last_write_text = False + else: + # No tag, just output text + end_index = inline[istart:].find('<') + if end_index < 0: + end_index = iend + else: + end_index += istart + # end if + outstr = inline[istart:end_index] + self._write(outfile, outstr.strip(), 0, eol='') + last_write_text = True + istart += self._inc_pos(outstr, inline, istart) + # end if + # end while + # end with + +############################################################################## + if __name__ == "__main__": _LOGGER = init_log('xml_tools') set_log_to_null(_LOGGER) diff --git a/scripts/state_machine.py b/scripts/state_machine.py index 8af4e571..f802d7bd 100644 --- a/scripts/state_machine.py +++ b/scripts/state_machine.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # """Classes and methods to implement a simple state machine.""" @@ -11,7 +11,7 @@ ############################################################################### -class StateMachine(object): +class StateMachine: """Class and methods to implement a simple state machine. Note, a collections.UserDict would be nice here but it is not in python 2. 
>>> StateMachine() diff --git a/scripts/suite_objects.py b/scripts/suite_objects.py new file mode 100644 index 00000000..caa19ec8 --- /dev/null +++ b/scripts/suite_objects.py @@ -0,0 +1,1908 @@ +#!/usr/bin/env python3 +# + +"""Classes and methods to create a Fortran suite-implementation file +to implement calls to a set of suites for a given host model.""" + +# Python library imports +import logging +import re +import xml.etree.ElementTree as ET +# CCPP framework imports +from ccpp_state_machine import CCPP_STATE_MACH, RUN_PHASE_NAME +from code_block import CodeBlock +from constituents import ConstituentVarDict +from framework_env import CCPPFrameworkEnv +from metavar import Var, VarDictionary, VarLoopSubst +from metavar import CCPP_CONSTANT_VARS, CCPP_LOOP_VAR_STDNAMES +from parse_tools import ParseContext, ParseSource, context_string +from parse_tools import ParseInternalError, CCPPError +from parse_tools import init_log, set_log_to_null +from var_props import is_horizontal_dimension, find_horizontal_dimension +from var_props import find_vertical_dimension + +# pylint: disable=too-many-lines + +############################################################################### +# Module (global) variables +############################################################################### + +_OBJ_LOC_RE = re.compile(r"(0x[0-9A-Fa-f]+)>") +_BLANK_DIMS_RE = re.compile(r"[(][:](,:)*[)]$") + +# Source for internally generated variables. 
+_API_SOURCE_NAME = "CCPP_API" +# Use the constituent source type for consistency +_API_SUITE_VAR_NAME = ConstituentVarDict.constitutent_source_type() +_API_GROUP_VAR_NAME = "group" +_API_SCHEME_VAR_NAME = "scheme" +_API_LOCAL_VAR_NAME = "local" +_API_LOCAL_VAR_TYPES = [_API_LOCAL_VAR_NAME, _API_SUITE_VAR_NAME] +_API_CONTEXT = ParseContext(filename="ccpp_suite.py") +_API_SOURCE = ParseSource(_API_SOURCE_NAME, _API_SCHEME_VAR_NAME, _API_CONTEXT) +_API_LOCAL = ParseSource(_API_SOURCE_NAME, _API_LOCAL_VAR_NAME, _API_CONTEXT) +_API_TIMESPLIT_TAG = 'time_split' +_API_PROCESSSPLIT_TAG = 'process_split' +_API_LOGGING = init_log('ccpp_suite') +set_log_to_null(_API_LOGGING) +_API_DUMMY_RUN_ENV = CCPPFrameworkEnv(_API_LOGGING, + ndict={'host_files':'', + 'scheme_files':'', + 'suites':''}) + +############################################################################### +def new_suite_object(item, context, parent, run_env): +############################################################################### + "'Factory' method to create the appropriate suite object from XML" + new_item = None + if item.tag == 'subcycle': + new_item = Subcycle(item, context, parent, run_env) + elif item.tag == 'scheme': + new_item = Scheme(item, context, parent, run_env) + elif item.tag == _API_TIMESPLIT_TAG: + new_item = TimeSplit(item, context, parent, run_env) + else: + emsg = "Unknown CCPP suite element type, '{}'" + raise CCPPError(emsg.format(item.tag)) + # end if + return new_item + +############################################################################### + +class CallList(VarDictionary): + """A simple class to hold a routine's call list (dummy arguments)""" + + def __init__(self, name, run_env, routine=None): + """Initialize this call list. + <name> is the name of this dictionary. + <routine> is a pointer to the routine for which this is a call list + or None for a routine that is not a SuiteObject. 
+ """ + self.__routine = routine + super().__init__(name, run_env) + + def add_vars(self, call_list, run_env, gen_unique=False): + """Add new variables from another CallList (<call_list>)""" + for var in call_list.variable_list(): + stdname = var.get_prop_value('standard_name') + if stdname not in self: + self.add_variable(var, run_env, gen_unique=gen_unique) + # end if + # end for + + def call_string(self, cldicts=None, is_func_call=False, subname=None): + """Return a dummy argument string for this call list. + <cldict> may be a list of VarDictionary objects to search for + local_names (default is to use self). + <is_func_call> should be set to True to construct a call statement. + If <is_func_call> is False, construct a subroutine dummy argument + list. + """ + arg_str = "" + arg_sep = "" + for var in self.variable_list(): + # Do not include constants + stdname = var.get_prop_value('standard_name') + if stdname not in CCPP_CONSTANT_VARS: + # Find the dummy argument name + dummy = var.get_prop_value('local_name') + # Now, find the local variable name + if cldicts is not None: + for cldict in cldicts: + dvar = cldict.find_variable(standard_name=stdname, + any_scope=False) + if dvar is not None: + break + # end if + # end for + if dvar is None: + if subname is not None: + errmsg = "{}: ".format(subname) + else: + errmsg = "" + # end if + errmsg += "'{}', not found in call list for '{}'" + clnames = [x.name for x in cldicts] + raise CCPPError(errmsg.format(stdname, clnames)) + # end if + lname = dvar.get_prop_value('local_name') + else: + cldict = None + aref = var.array_ref(local_name=dummy) + if aref is not None: + lname = aref.group(1) + else: + lname = dummy + # end if + # end if + if is_func_call: + if cldicts is not None: + use_dicts = cldicts + else: + use_dicts = [self] + # end if + run_phase = self.routine.run_phase() + # We only need dimensions for suite variables in run phase + need_dims = SuiteObject.is_suite_variable(dvar) and run_phase + vdims = 
var.call_dimstring(var_dicts=use_dicts, + explicit_dims=need_dims, + loop_subst=run_phase) + if _BLANK_DIMS_RE.match(vdims) is None: + lname = lname + vdims + # end if + # end if + if is_func_call: + arg_str += "{}{}={}".format(arg_sep, dummy, lname) + else: + arg_str += "{}{}".format(arg_sep, lname) + # end if + arg_sep = ", " + # end if + # end for + return arg_str + + @property + def routine(self): + """Return the routine for this call list (or None)""" + return self.__routine + +############################################################################### + +class SuiteObject(VarDictionary): + """Base class for all CCPP Suite objects (e.g., Scheme, Subcycle) + SuiteObjects have an internal dictionary for variables created for + execution of the SuiteObject. These variables will be allocated and + managed at the Group level (unless cross-group usage or persistence + requires handling at the Suite level). + SuiteObjects also have a call list which is a list of variables which + are passed to callable SuiteObjects (e.g., Scheme). + """ + + def __init__(self, name, context, parent, run_env, + active_call_list=False, variables=None, phase_type=None): + # pylint: disable=too-many-arguments + self.__name = name + self.__context = context + self.__parent = parent + self.__run_env = run_env + if active_call_list: + self.__call_list = CallList(name + '_call_list', run_env, + routine=self) + else: + self.__call_list = None + # end if + self.__parts = list() + self.__needs_vertical = None + self.__needs_horizontal = None + self.__phase_type = phase_type + # Initialize our dictionary + super().__init__(self.name, run_env, + variables=variables, parent_dict=parent) + + def declarations(self): + """Return a list of local variables to be declared in parent Group + or Suite. By default, this list is the object's embedded VarDictionary. 
+ """ + return self.variable_list() + + def add_part(self, item, replace=False): + """Add an object (e.g., Scheme, Subcycle) to this SuiteObject. + If <item> needs to be in a VerticalLoop, look for an appropriate + VerticalLoop object or create one. + if <replace> is True, replace <item> in its current position in self. + Note that if <item> is not to be inserted in a VerticalLoop, + <replace> has no effect. + """ + if replace: + if item in self.__parts: + index = self.__parts.index(item) + else: + emsg = 'Cannot replace {} in {}, not a member' + raise ParseInternalError(emsg.format(item.name, self.name)) + # end if + else: + if item in self.__parts: + emsg = 'Cannot add {} to {}, already a member' + raise ParseInternalError(emsg.format(item.name, self.name)) + # end if + index = len(self.__parts) + # end if + # Does this item need to be in a VerticalLoop? + if item.needs_vertical is not None: + iparent = item.parent + if isinstance(self, VerticalLoop): + # It is being added to a VerticalLoop, call it good + pass + elif isinstance(iparent, VerticalLoop): + # Why are we doing this? + emsg = ('Trying to add {} {} to {} {} but it is already ' + 'in VerticalLoop {}') + raise ParseInternalError(emsg.format(item.__class__.__name__, + item.name, + self.__class__.__name__, + self.name, iparent.name)) + else: + pitem = iparent.part(-1, error=False) + added = False + if isinstance(pitem, VerticalLoop): + # Can we attach item to this loop? 
+ if pitem.dimension_name == item.needs_vertical: + pitem.add_part(item) + if replace: + self.remove_part(index) + # end if (no else, we already added it) + added = True + # end if + # end if + if not added: + # Need to add item to a new VerticalLoop + # We are in the process of providing the vertical coord + vert_index = item.needs_vertical + item.needs_vertical = None + new_vl = VerticalLoop(vert_index, self.__context, + self, self.run_env, items=[item]) + if replace: + self.remove_part(index) + # end if (no else, adding the loop below) + self.__parts.insert(index, new_vl) + item.reset_parent(new_vl) + # end if + # end if + else: + # Just add <item> + self.__parts.insert(index, item) + item.reset_parent(self) + # end if + + def remove_part(self, index): + """Remove the SuiteObject part at index""" + plen = len(self.__parts) + if (0 <= index < plen) or (abs(index) <= plen): + del self.__parts[index] + else: + errmsg = "Invalid index for remove_part, {}, ".format(index) + if plen > 0: + errmsg += "SuiteObject only has {} parts".format(plen) + else: + errmsg += "SuiteObject only has no parts" + # end if + raise ParseInternalError(errmsg, context=self.__context) + # end if + + def schemes(self): + """Return a flattened list of schemes for this SuiteObject""" + schemes = list() + for item in self.__parts: + schemes.extend(item.schemes()) + # end for + return schemes + + def move_part(self, part, source_object, loc=-1): + """Operator to move <part> from <source_object> to <self>. + If <loc> is -1, <part> is appended to <self>, + otherwise, <part> is inserted at <loc>. 
+ """ + if part in source_object.parts: + # Sanitize loc + try: + iloc = int(loc) + except ValueError: + errmsg = "Invalid loc value for move_part, {}".format(loc) + raise ParseInternalError(errmsg, context=self.__context) + # end try + if iloc == -1: + self.__parts.append(part) + else: + self.__parts.insert(iloc, part) + # end if + index = source_object.index(part) + source_object.remove_part(index) + # <part> now has a new parent + part.reset_parent(self) + + def reset_parent(self, new_parent): + """Reset the parent of this SuiteObject (which has been moved)""" + self.__parent = new_parent + + def phase(self): + """Return the CCPP state phase_type for this SuiteObject""" + trans = self.phase_type + if trans is None: + if self.parent is not None: + trans = self.parent.phase() + else: + trans = False + # end if + # end if + return trans + + def run_phase(self): + """Return True iff this SuiteObject is in a run phase group""" + return self.phase() == RUN_PHASE_NAME + + def timestep_phase(self): + '''Return True iff this SuiteObject is in a timestep initial or + timestep final phase group''' + phase = self.phase() + return (phase is not None) and ('timestep' in phase) + + def register_action(self, vaction): + """Register (i.e., save information for processing during write stage) + <vaction> and return True or pass <vaction> up to the parent of + <self>. Return True if any level registers <vaction>, False otherwise. + The base class will not register any action, it must be registered in + an override of this method. 
+ """ + if self.parent is not None: + return self.parent.register_action(vaction) + # end if + return False + + @classmethod + def is_suite_variable(cls, var): + """Return True iff <var> belongs to our Suite""" + return var and (var.source.type == _API_SUITE_VAR_NAME) + + def is_local_variable(self, var): + """Return the local variable matching <var> if one is found belonging + to this object or any of its SuiteObject parents.""" + stdname = var.get_prop_value('standard_name') + lvar = None + obj = self + while (not lvar) and (obj is not None) and isinstance(obj, SuiteObject): + lvar = obj.find_variable(standard_name=stdname, any_scope=False, + search_call_list=False) + if not lvar: + obj = obj.parent + # end if + # end while + return lvar + + def add_call_list_variable(self, newvar, exists_ok=False, + gen_unique=False, subst_dict=None): + """Add <newvar> to this SuiteObject's call_list. If this SuiteObject + does not have a call list, recursively try the SuiteObject's parent + If <subst_dict> is not None, create a clone using that as a dictionary + of substitutions. + Do not add <newvar> if it exists as a local variable. + Do not add <newvar> if it is a suite variable""" + stdname = newvar.get_prop_value('standard_name') + if self.parent: + pvar = self.parent.find_variable(standard_name=stdname, + source_var=newvar, + any_scope=False) + else: + pvar = None + # end if + if SuiteObject.is_suite_variable(pvar): + pass # Do not add suite variable to a call list + elif self.is_local_variable(newvar): + pass # Do not add to call list, it is owned by a SuiteObject + elif self.call_list is not None: + if (stdname in CCPP_LOOP_VAR_STDNAMES) and (not self.run_phase()): + errmsg = 'Attempting to use loop variable {} in {} phase' + raise CCPPError(errmsg.format(stdname, self.phase())) + # end if + # Do we need a clone? 
+ if isinstance(self, Group): + stype = _API_GROUP_VAR_NAME + else: + stype = None + # end if + if stype or subst_dict: + oldvar = newvar + if subst_dict is None: + subst_dict = {} + # end if + # Make sure that this variable has an intent + if ((oldvar.get_prop_value("intent") is None) and + ("intent" not in subst_dict)): + subst_dict["intent"] = "in" + # end if + newvar = oldvar.clone(subst_dict, source_name=self.name, + source_type=stype, context=self.context) + # end if + self.call_list.add_variable(newvar, self.run_env, + exists_ok=exists_ok, + gen_unique=gen_unique, + adjust_intent=True) + # We need to make sure that this variable's dimensions are available + for vardim in newvar.get_dim_stdnames(include_constants=False): + dvar = self.find_variable(standard_name=vardim, + any_scope=True) + if dvar is None: + emsg = "{}: Could not find dimension {} in {}" + raise ParseInternalError(emsg.format(self.name, + stdname, vardim)) + # end if + elif self.parent is None: + errmsg = 'No call_list found for {}'.format(newvar) + raise ParseInternalError(errmsg) + elif pvar: + # Check for call list incompatibility + if pvar is not None: + compat, reason = pvar.compatible(newvar, self.run_env) + if not compat: + emsg = 'Attempt to add incompatible variable to call list:' + emsg += '\n{} from {} is not compatible with {} from {}' + nlreason = newvar.get_prop_value(reason) + plreason = pvar.get_prop_value(reason) + emsg += '\nreason = {} ({} != {})'.format(reason, + nlreason, + plreason) + nlname = newvar.get_prop_value('local_name') + plname = pvar.get_prop_value('local_name') + raise CCPPError(emsg.format(nlname, newvar.source.name, + plname, pvar.source.name)) + # end if + # end if (no else, variable already in call list) + else: + self.parent.add_call_list_variable(newvar, exists_ok=exists_ok, + gen_unique=gen_unique, + subst_dict=subst_dict) + # end if + + def add_variable_to_call_tree(self, var, vmatch=None, subst_dict=None): + """Add <var> to <self>'s call_list (or a 
parent if <self> does not + have an active call_list). + If <vmatch> is not None, also add the loop substitution variables + which must be present. + If <subst_dict> is not None, create a clone using that as a dictionary + of substitutions. + """ + found_dims = False + if var is not None: + self.add_call_list_variable(var, exists_ok=True, + gen_unique=True, subst_dict=subst_dict) + found_dims = True + # end if + if vmatch is not None: + svars = vmatch.has_subst(self, any_scope=True) + if svars is None: + found_dims = False + else: + found_dims = True + for svar in svars: + self.add_call_list_variable(svar, exists_ok=True) + # end for + # Register the action (probably at Group level) + self.register_action(vmatch) + # end if + # end if + return found_dims + + def vert_dim_match(self, vloop_subst): + """If self is or is a part of a VerticalLoop object for + the substitute index for <vloop_subst>, return the substitute + loop index standard name, otherwise, return None. + """ + dim_match = None + parent = self + if len(vloop_subst.required_stdnames) != 1: + errmsg = 'vert_dim_match can only handle one substitute index' + raise ParseInternalError(errmsg) + # end if + index_dim = vloop_subst.required_stdnames[0] + while parent is not None: + if isinstance(parent, VerticalLoop) and (parent.name == index_dim): + dim_match = index_dim + break + # end if + parent = parent.parent + # end for + return dim_match + + def horiz_dim_match(self, ndim, hdim, nloop_subst): + """Find a match between <ndim> and <hdim>, if they are both + horizontal dimensions. + If <ndim> == <hdim>, return <ndim>. + If <nloop_subst> is not None and its required standard names exist + in our extended dictionary, return them. + Otherwise, return None. + NB: Loop substitutions are only allowed during the run phase but in + other phases, horizontal_dimension and horizontal_loop_extent + are the same. 
+ """ + dim_match = None + nis_hdim = is_horizontal_dimension(ndim) + his_hdim = is_horizontal_dimension(hdim) + if nis_hdim and his_hdim: + if ndim == hdim: + dim_match = ndim + elif self.run_phase() and (nloop_subst is not None): + svars = nloop_subst.has_subst(self, any_scope=True) + match = svars is not None + if match: + if isinstance(self, Scheme): + obj = self.parent + else: + obj = self + # end if + for svar in svars: + obj.add_call_list_variable(svar, exists_ok=True) + # end for + dim_match = ':'.join(nloop_subst.required_stdnames) + # end if + elif not self.run_phase(): + if ((hdim == 'ccpp_constant_one:horizontal_dimension') and + (ndim == 'ccpp_constant_one:horizontal_loop_extent')): + dim_match = hdim + elif ((hdim == 'ccpp_constant_one:horizontal_dimension') and + (ndim == 'horizontal_loop_begin:horizontal_loop_end')): + dim_match = hdim + # end if (no else, there is no non-run-phase match) + # end if (no else, there is no match) + # end if (no else, there is no match) + return dim_match + + @staticmethod + def dim_match(need_dim, have_dim): + """Test whether <need_dim> matches <have_dim>. + If they match, return the matching dimension (which may be + modified by, e.g., a loop substitution). + If they do not match, return None. + """ + match = None + # First, try for all the marbles + if need_dim == have_dim: + match = need_dim + # end if + # Is one side missing a one start? + if not match: + ndims = need_dim.split(':') + hdims = have_dim.split(':') + if len(ndims) > len(hdims): + if ndims[0].lower == 'ccpp_constant_one': + ndims = ndims[1:] + elif hdims[0].lower == 'ccpp_constant_one': + hdims = hdims[1:] + # end if (no else) + # Last try + match = ndims == hdims + # end if + # end if + + return match + + def match_dimensions(self, need_dims, have_dims): + """Compare dimensions between <need_dims> and <have_dims>. + Return 6 items: + 1) Return True if all dims match. 
+ If <have_dims> has a vertical dimension and <need_dims> does not + but all other dimensions match, return False but include the + missing dimension index as the third return value. + 2) Return <need_dims> modified, if necessary to + reflect the available limits. + 3) Return have_dims modified, if necessary to reflect + any loop substitutions. If no substitutions, return None + This is done so that the correct dimensions are used in the host cap. + 4) Return the name of the missing vertical index, or None + 5) Return a permutation array if the dimension ordering is + different (or None if the ordering is the same). Each element of the + permutation array is the index in <have_dims> for that dimension of + <need_dims>. + 6) Finally, return a 'reason' string. If match (first return value) is + False, this string will contain information about the reason for + the match failure. + >>> SuiteObject('foo', _API_CONTEXT, None, _API_DUMMY_RUN_ENV).match_dimensions(['horizontal_loop_extent'], ['horizontal_loop_extent']) + (True, ['horizontal_loop_extent'], ['horizontal_loop_extent'], None, None, '') + >>> SuiteObject('foo', _API_CONTEXT,None,_API_DUMMY_RUN_ENV,variables=[Var({'local_name':'beg','standard_name':'horizontal_loop_begin','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL,_API_DUMMY_RUN_ENV),Var({'local_name':'end','standard_name':'horizontal_loop_end','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL, _API_DUMMY_RUN_ENV)],active_call_list=True,phase_type='initialize').match_dimensions(['ccpp_constant_one:horizontal_loop_extent'], ['ccpp_constant_one:horizontal_dimension']) + (True, ['ccpp_constant_one:horizontal_dimension'], ['ccpp_constant_one:horizontal_dimension'], None, None, '') + >>> SuiteObject('foo', _API_CONTEXT,None,_API_DUMMY_RUN_ENV,variables=[Var({'local_name':'beg','standard_name':'horizontal_loop_begin','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL, 
_API_DUMMY_RUN_ENV),Var({'local_name':'end','standard_name':'horizontal_loop_end','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL, _API_DUMMY_RUN_ENV)],active_call_list=True,phase_type=RUN_PHASE_NAME).match_dimensions(['ccpp_constant_one:horizontal_loop_extent'], ['horizontal_loop_begin:horizontal_loop_end']) + (True, ['horizontal_loop_begin:horizontal_loop_end'], ['horizontal_loop_begin:horizontal_loop_end'], None, None, '') + >>> SuiteObject('foo', _API_CONTEXT,None,_API_DUMMY_RUN_ENV,variables=[Var({'local_name':'beg','standard_name':'horizontal_loop_begin','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL, _API_DUMMY_RUN_ENV),Var({'local_name':'end','standard_name':'horizontal_loop_end','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL, _API_DUMMY_RUN_ENV),Var({'local_name':'lev','standard_name':'vertical_layer_dimension','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL, _API_DUMMY_RUN_ENV)],active_call_list=True,phase_type=RUN_PHASE_NAME).match_dimensions(['ccpp_constant_one:horizontal_loop_extent'], ['horizontal_loop_begin:horizontal_loop_end','ccpp_constant_one:vertical_layer_dimension']) + (False, ['horizontal_loop_begin:horizontal_loop_end', 'vertical_layer_index'], ['horizontal_loop_begin:horizontal_loop_end', 'ccpp_constant_one:vertical_layer_dimension'], 'vertical_layer_index', None, 'missing vertical dimension') + >>> SuiteObject('foo', _API_CONTEXT,None,_API_DUMMY_RUN_ENV,variables=[Var({'local_name':'beg','standard_name':'horizontal_loop_begin','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL, _API_DUMMY_RUN_ENV),Var({'local_name':'end','standard_name':'horizontal_loop_end','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL, _API_DUMMY_RUN_ENV),Var({'local_name':'lev','standard_name':'vertical_layer_dimension','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL, 
_API_DUMMY_RUN_ENV)],active_call_list=True,phase_type=RUN_PHASE_NAME).match_dimensions(['ccpp_constant_one:horizontal_loop_extent','ccpp_constant_one:vertical_layer_dimension'], ['horizontal_loop_begin:horizontal_loop_end','ccpp_constant_one:vertical_layer_dimension']) + (True, ['horizontal_loop_begin:horizontal_loop_end', 'ccpp_constant_one:vertical_layer_dimension'], ['horizontal_loop_begin:horizontal_loop_end', 'ccpp_constant_one:vertical_layer_dimension'], None, None, '') + >>> SuiteObject('foo', _API_CONTEXT,None,_API_DUMMY_RUN_ENV,variables=[Var({'local_name':'beg','standard_name':'horizontal_loop_begin','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL, _API_DUMMY_RUN_ENV),Var({'local_name':'end','standard_name':'horizontal_loop_end','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL, _API_DUMMY_RUN_ENV),Var({'local_name':'lev','standard_name':'vertical_layer_dimension','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL, _API_DUMMY_RUN_ENV)],active_call_list=True,phase_type=RUN_PHASE_NAME).match_dimensions(['ccpp_constant_one:horizontal_loop_extent','ccpp_constant_one:vertical_layer_dimension'], ['ccpp_constant_one:vertical_layer_dimension','horizontal_loop_begin:horizontal_loop_end']) + (True, ['horizontal_loop_begin:horizontal_loop_end', 'ccpp_constant_one:vertical_layer_dimension'], ['ccpp_constant_one:vertical_layer_dimension', 'horizontal_loop_begin:horizontal_loop_end'], None, [1, 0], '') + """ + new_need_dims = [] + new_have_dims = list(have_dims) + perm = [] + match = True + missing_vert_dim = None + reason = '' + nlen = len(need_dims) + hlen = len(have_dims) + _, nvdim_index = find_vertical_dimension(need_dims) + _, hvdim_index = find_vertical_dimension(have_dims) + _, nhdim_index = find_horizontal_dimension(need_dims) + _, hhdim_index = find_horizontal_dimension(have_dims) + if hhdim_index < 0 <= nhdim_index: + match = False + nlen = 0 # To skip logic below + hlen = 0 # To skip logic below + reason = 
'{hname}{hctx} is missing a horizontal dimension ' + reason += 'required by {nname}{nctx}' + # end if + for nindex in range(nlen): + neddim = need_dims[nindex] + if nindex == nhdim_index: + # Look for a horizontal dimension match + vmatch = VarDictionary.loop_var_match(neddim) + hmatch = self.horiz_dim_match(neddim, have_dims[hhdim_index], + vmatch) + if hmatch: + perm.append(hhdim_index) + new_need_dims.append(hmatch) + new_have_dims[hhdim_index] = hmatch + found_ndim = True + else: + found_ndim = False + # end if + else: + # Find the first dimension in have_dims that matches neddim + found_ndim = False + if nvdim_index < 0 <= hvdim_index: + skip = hvdim_index + else: + skip = -1 + # end if + hdim_indices = [x for x in range(hlen) + if (x not in perm) and (x != skip)] + for hindex in hdim_indices: + if (hindex != hvdim_index) or (nvdim_index >= 0): + hmatch = self.dim_match(neddim, have_dims[hindex]) + if hmatch: + perm.append(hindex) + new_need_dims.append(hmatch) + new_have_dims[hindex] = hmatch + found_ndim = True + break + # end if + # end if + # end if + # end for + if not found_ndim: + match = False + reason = 'Could not find dimension, ' + neddim + ', in ' + reason += '{hname}{hctx}. 
Needed by {nname}{nctx}' + break + # end if (no else, we are still okay) + # end for + # Find a missing vertical dimension index, if necessary + if nvdim_index < 0 <= hvdim_index: + # We need to make a substitution for the vertical + # coordinate in have_dims + vvmatch = VarDictionary.loop_var_match(have_dims[hvdim_index]) + if vvmatch: + vmatch_dims = ':'.join(vvmatch.required_stdnames) + # See if the missing vertical dimensions exist + missing_vert_dim = None + for mstdname in vvmatch.required_stdnames: + mvdim = self.find_variable(standard_name=mstdname, + any_scope=True) + if not mvdim: + missing_vert_dim = vmatch_dims + match = False # Should trigger vertical loop action + reason = 'missing vertical dimension' + break + # end if + # end for + # While we have a missing vertical dimension which has been + # created, do NOT enter the substitution into have_dims. + # The supplied variable still has a vertical dimension. + # On the other hand, we *do* need to add the new vertical + # loop index to new_need_dims. Try to put it in the correct + # place for easy calling from the existing variable. 
+ # Also update perm to match the array access + if hvdim_index < len(new_need_dims): + # Insert the vertical loop dimension + if hvdim_index > 0: + before = new_need_dims[0:hvdim_index] + perm_before = perm[0:hvdim_index] + else: + before = [] + perm_before = [] + # end if + after = new_need_dims[hvdim_index:] + new_need_dims = before + [vmatch_dims] + after + perm = perm_before + [hvdim_index] + perm[hvdim_index:] + else: + new_need_dims.append(vmatch_dims) + perm.append(hvdim_index) + # end if + else: + emsg = "Unknown vertical dimension dimension, '{}'" + raise CCPPError(emsg.format(have_dims[hvdim_index])) + # end if + else: + missing_vert_dim = None + # end if + perm_test = list(range(hlen)) + # If no permutation is found, reset to None + if perm == perm_test: + perm = None + elif (not match) and (missing_vert_dim is None): + perm = None + # end if (else, return perm as is) + if new_have_dims == have_dims: + have_dims = None # Do not make any substitutions + # end if + return match, new_need_dims, new_have_dims, missing_vert_dim, perm, reason + + def find_variable(self, standard_name=None, source_var=None, + any_scope=True, clone=None, + search_call_list=False, loop_subst=False): + """Find a matching variable to <var>, create a local clone (if + <clone> is True), or return None. + First search the SuiteObject's internal dictionary, then its + call list (unless <skip_call_list> is True, then any parent + dictionary (if <any_scope> is True). + <var> can be a Var object or a standard_name string. + <loop_subst> is not used by this version of <find_variable>. + """ + # First, search our local dictionary + if standard_name is None: + if source_var is None: + emsg = "One of <standard_name> or <source_var> must be passed." 
+ raise ParseInternalError(emsg) + # end if + standard_name = source_var.get_prop_value('standard_name') + elif source_var is not None: + stest = source_var.get_prop_value('standard_name') + if stest != standard_name: + emsg = ("<standard_name> and <source_var> must match if " + + "both are passed.") + raise ParseInternalError(emsg) + # end if + # end if + scl = search_call_list + stdname = standard_name + # Don't clone yet, might find the variable further down + found_var = super().find_variable(standard_name=stdname, + source_var=source_var, + any_scope=False, clone=None, + search_call_list=scl, + loop_subst=loop_subst) + if (not found_var) and (self.call_list is not None) and scl: + # Don't clone yet, might find the variable further down + found_var = self.call_list.find_variable(standard_name=stdname, + source_var=source_var, + any_scope=False, + clone=None, + search_call_list=scl, + loop_subst=loop_subst) + # end if + loop_okay = VarDictionary.loop_var_okay(stdname, self.run_phase()) + if not loop_okay: + loop_subst = False + # end if + if (found_var is None) and any_scope and (self.parent is not None): + # We do not have the variable, look to parents. + found_var = self.parent.find_variable(standard_name=stdname, + source_var=source_var, + any_scope=True, + clone=clone, + search_call_list=scl, + loop_subst=loop_subst) + # end if + return found_var + + def match_variable(self, var, vstdname=None, vdims=None): + """Try to find a source for <var> in this SuiteObject's dictionary + tree. 
Several items are returned: + found_var: True if a match was found + vert_dim: The vertical dimension in <var>, or None + call_dims: How this variable should be called (or None if no match) + missing_vert: Vertical dim in parent but not in <var> + perm: Permutation (XXgoldyXX: Not yet implemented) + """ + if vstdname is None: + vstdname = var.get_prop_value('standard_name') + # end if + if vdims is None: + vdims = var.get_dimensions() + # end if + if (not vdims) and self.run_phase(): + vmatch = VarDictionary.loop_var_match(vstdname) + else: + vmatch = None + # end if + found_var = False + missing_vert = None + new_vdims = list() + var_vdim = var.has_vertical_dimension(dims=vdims) + # Does this variable exist in the calling tree? + dict_var = self.find_variable(source_var=var, any_scope=True) + if dict_var is None: + # No existing variable but add loop var match to call tree + found_var = self.parent.add_variable_to_call_tree(dict_var, + vmatch=vmatch) + new_vdims = vdims + elif dict_var.source.type in _API_LOCAL_VAR_TYPES: + # We cannot change the dimensions of locally-declared variables + # Using a loop substitution is invalid because the loop variable + # value has not yet been set. + # Therefore, we have to use the declaration dimensions in the call. 
+ found_var = True + new_vdims = dict_var.get_dimensions() + else: + # Check dimensions + dict_dims = dict_var.get_dimensions() + if vdims: + args = self.parent.match_dimensions(vdims, dict_dims) + match, new_vdims, new_dict_dims, missing_vert, perm, err = args + if perm is not None: + errmsg = "Permuted indices are not yet supported" + lname = var.get_prop_value('local_name') + dstr = ', '.join(vdims) + ctx = context_string(var.context) + errmsg += ", var = {}({}){}".format(lname, dstr, ctx) + raise CCPPError(errmsg) + # end if + else: + new_vdims = list() + new_dict_dims = dict_dims + match = True + # end if + # Add the variable to the parent call tree + if dict_dims == new_dict_dims: + sdict = {} + else: + sdict = {'dimensions':new_dict_dims} + # end if + found_var = self.parent.add_variable_to_call_tree(var, + subst_dict=sdict) + if not match: + found_var = False + if not missing_vert: + nctx = context_string(var.context) + nname = var.get_prop_value('local_name') + hctx = context_string(dict_var.context) + hname = dict_var.get_prop_value('local_name') + raise CCPPError(err.format(nname=nname, nctx=nctx, + hname=hname, hctx=hctx)) + # end if + # end if + # end if + # end if + return found_var, var_vdim, new_vdims, missing_vert + + def in_process_split(self): + """Find out if we are in a process-split region""" + proc_split = False + obj = self + while obj is not None: + if isinstance(obj, ProcessSplit): + proc_split = True + break + # end if + if isinstance(obj, TimeSplit): + break + # end if (other object types do not change status) + obj = obj.parent + # end while + return proc_split + + def part(self, index, error=True): + """Return one of this SuiteObject's parts raise an exception, or, + if <error> is False, just return None""" + plen = len(self.__parts) + if (0 <= index < plen) or (abs(index) <= plen): + return self.__parts[index] + # end if + if error: + errmsg = 'No part {} in {} {}'.format(index, + self.__class__.__name__, + self.name) + raise 
ParseInternalError(errmsg) + # end if + return None + + def has_item(self, item_name): + """Return True iff item, <item_name>, is already in this SuiteObject""" + has = False + for item in self.__parts: + if item.name == item_name: + has = True + else: + has = item.has_item(item_name) + # end if + if has: + break + # end if + # end for + return has + + @property + def name(self): + """Return the name of the element""" + return self.__name + + @name.setter + def name(self, value): + """Set the name of the element if it has not been set""" + if self.__name is None: + self.__name = value + else: + errmsg = 'Attempt to change name of {} to {}' + raise ParseInternalError(errmsg.format(self, value)) + # end if + + @property + def parent(self): + """This SuiteObject's parent (or none)""" + return self.__parent + + @property + def call_list(self): + """Return the SuiteObject's call_list""" + return self.__call_list + + @property + def phase_type(self): + """Return the phase_type of this suite_object""" + return self.__phase_type + + @property + def parts(self): + """Return a copy the component parts of this SuiteObject. 
+ Returning a copy allows for the part list to be changed during + processing of the return value""" + return self.__parts[:] + + @property + def needs_vertical(self): + """Return the vertical dimension this SuiteObject is missing or None""" + return self.__needs_vertical + + @needs_vertical.setter + def needs_vertical(self, value): + """Reset the missing vertical dimension of this SuiteObject""" + if value is None: + self.__needs_vertical = value + elif self.__needs_vertical is not None: + if self.__needs_vertical != value: + errmsg = ('Attempt to change missing vertical dimension ' + 'from {} to {}') + raise ParseInternalError(errmsg.format(self.__needs_vertical, + value)) + # end if (no else, value is already correct) + else: + self.__needs_vertical = value + # end if + + @property + def context(self): + """Return the context of this SuiteObject""" + return self.__context + + @property + def run_env(self): + """Return the CCPPFrameworkEnv runtime object for this SuiteObject""" + return self.__run_env + + def __repr__(self): + """Create a unique readable string for this Object""" + so_repr = super().__repr__() + olmatch = _OBJ_LOC_RE.search(so_repr) + if olmatch is not None: + loc = ' at {}'.format(olmatch.group(1)) + else: + loc = "" + # end if + return '<{} {}{}>'.format(self.__class__.__name__, self.name, loc) + + def __format__(self, spec): + """Return a string representing the SuiteObject, including its children. + <spec> is used between subitems. + <ind_level> is the indent level for multi-line output. 
+ """ + if spec: + sep = spec[0] + else: + sep = '\n' + # end if + try: + ind_level = int(spec[1:]) + except (ValueError, IndexError): + ind_level = 0 + # end try + if sep == '\n': + indent = " " + else: + indent = "" + # end if + if self.name == self.__class__.__name__: + # This object does not have separate name + nstr = self.name + else: + nstr = "{}: {}".format(self.__class__.__name__, self.name) + # end if + output = "{}<{}>".format(indent*ind_level, nstr) + subspec = "{}{}".format(sep, ind_level + 1) + substr = "{o}{s}{p:" + subspec + "}" + subout = "" + for part in self.parts: + subout = substr.format(o=subout, s=sep, p=part) + # end for + if subout: + output = "{}{}{}{}</{}>".format(output, subout, sep, + indent*ind_level, + self.__class__.__name__) + else: + output = "{}</{}>".format(output, self.__class__.__name__) + # end if + return output + +############################################################################### + +class Scheme(SuiteObject): + """A single scheme in a suite (e.g., init method)""" + + def __init__(self, scheme_xml, context, parent, run_env): + """Initialize this physics Scheme""" + name = scheme_xml.text + self.__subroutine_name = None + self.__context = context + self.__version = scheme_xml.get('version', None) + self.__lib = scheme_xml.get('lib', None) + self.__has_vertical_dimension = False + self.__group = None + super().__init__(name, context, parent, run_env, active_call_list=True) + + def update_group_call_list_variable(self, var): + """If <var> is in our group's call list, update its intent. 
+ Add <var> to our group's call list unless: + - <var> is in our group's call list + - <var> is in our group's dictionary, + - <var> is a suite variable""" + stdname = var.get_prop_value('standard_name') + my_group = self.__group + gvar = my_group.call_list.find_variable(standard_name=stdname, + any_scope=False) + if gvar: + gvar.adjust_intent(var) + else: + gvar = my_group.find_variable(standard_name=stdname, + any_scope=False) + if gvar is None: + # Check for suite variable + gvar = my_group.find_variable(standard_name=stdname, + any_scope=True) + if gvar and (not SuiteObject.is_suite_variable(gvar)): + gvar = None + # end if + if gvar is None: + my_group.add_call_list_variable(var) + # end if + # end if + + def is_local_variable(self, var): + """Return None as we never consider <var> to be in our local + dictionary. + This is an override of the SuiteObject version""" + return None + + def analyze(self, phase, group, scheme_library, suite_vars, level): + """Analyze the scheme's interface to prepare for writing""" + self.__group = group + my_header = None + if self.name in scheme_library: + func = scheme_library[self.name] + if phase in func: + my_header = func[phase] + self.__subroutine_name = my_header.title + # end if + else: + estr = 'No schemes found for {}' + raise ParseInternalError(estr.format(self.name), + context=self.__context) + # end if + if my_header is None: + estr = 'No {} header found for scheme, {}' + raise ParseInternalError(estr.format(phase, self.name), + context=self.__context) + # end if + if my_header.module is None: + estr = 'No module found for subroutine, {}' + raise ParseInternalError(estr.format(self.subroutine_name), + context=self.__context) + # end if + scheme_mods = set() + scheme_mods.add((my_header.module, self.subroutine_name)) + for var in my_header.variable_list(): + vstdname = var.get_prop_value('standard_name') + def_val = var.get_prop_value('default_value') + vdims = var.get_dimensions() + vintent = 
var.get_prop_value('intent') + args = self.match_variable(var, vstdname=vstdname, vdims=vdims) + found, vert_dim, new_dims, missing_vert = args + if found: + if not self.has_vertical_dim: + self.__has_vertical_dimension = vert_dim is not None + # end if + # We have a match, make sure var is in call list + if new_dims == vdims: + self.add_call_list_variable(var, exists_ok=True) + self.update_group_call_list_variable(var) + else: + subst_dict = {'dimensions':new_dims} + clone = var.clone(subst_dict) + self.add_call_list_variable(clone, exists_ok=True) + self.update_group_call_list_variable(clone) + # end if + else: + if missing_vert is not None: + # This Scheme needs to be in a VerticalLoop + self.needs_vertical = missing_vert + break # Deal with this and come back + # end if + if vintent == 'out': + if self.__group is None: + errmsg = 'Group not defined for {}'.format(self.name) + raise ParseInternalError(errmsg) + # end if + # The Group will manage this variable + self.__group.manage_variable(var) + self.add_call_list_variable(var) + elif def_val and (vintent != 'out'): + if self.__group is None: + errmsg = 'Group not defined for {}'.format(self.name) + raise ParseInternalError(errmsg) + # end if + # The Group will manage this variable + self.__group.manage_variable(var) + # We still need it in our call list (the group uses a clone) + self.add_call_list_variable(var) + else: + errmsg = 'Input argument for {}, {}, not found.' + if self.find_variable(source_var=var) is not None: + # The variable exists, maybe it is dim mismatch + lname = var.get_prop_value('local_name') + emsg = '\nCheck for dimension mismatch in {}' + errmsg += emsg.format(lname) + # end if + if ((not self.run_phase()) and + (vstdname in CCPP_LOOP_VAR_STDNAMES)): + emsg = '\nLoop variables not allowed in {} phase.' 
+ errmsg += emsg.format(self.phase()) + # end if + raise CCPPError(errmsg.format(self.subroutine_name, + vstdname)) + # end if + # end if + # end for + if self.needs_vertical is not None: + self.parent.add_part(self, replace=True) # Should add a vloop + if isinstance(self.parent, VerticalLoop): + # Restart the loop analysis + scheme_mods = self.parent.analyze(phase, group, scheme_library, + suite_vars, level) + # end if + # end if + return scheme_mods + + def write(self, outfile, errcode, indent): + # Unused arguments are for consistent write interface + # pylint: disable=unused-argument + """Write code to call this Scheme to <outfile>""" + # Dictionaries to try are our group, the group's call list, + # or our module + cldicts = [self.__group, self.__group.call_list] + cldicts.extend(self.__group.suite_dicts()) + my_args = self.call_list.call_string(cldicts=cldicts, + is_func_call=True, + subname=self.subroutine_name) + stmt = 'call {}({})' + outfile.write('if ({} == 0) then'.format(errcode), indent) + outfile.write(stmt.format(self.subroutine_name, my_args), indent+1) + outfile.write('end if', indent) + + def schemes(self): + """Return self as a list for consistency with subcycle""" + return [self] + + def variable_list(self, recursive=False, + std_vars=True, loop_vars=True, consts=True): + """Return a list of all variables for this Scheme. + Because Schemes do not have any variables, return a list + of this object's CallList variables instead. 
class VerticalLoop(SuiteObject):
    """Class to call a group of schemes or scheme collections in a
    loop over a vertical dimension."""

    def __init__(self, index_name, context, parent, run_env, items=None):
        """<index_name> is the standard name of the variable holding the
        number of iterations (e.g., vertical_layer_dimension).
        <items> may be None, a single SuiteObject, or a list of them;
        each becomes a part of this loop."""
        # self._dim_name is the standard name for the number of iterations
        self._dim_name = VarDictionary.find_loop_dim_from_index(index_name)
        if self._dim_name is None:
            errmsg = 'No VerticalLoop dimension name for index = {}'
            raise ParseInternalError(errmsg.format(index_name))
        # end if
        if ':' in self._dim_name:
            # A range dimension; loop over its upper bound
            dims = self._dim_name.split(':')
            if not dims[1]:
                errmsg = 'Invalid loop dimension, {}'
                raise ParseInternalError(errmsg.format(self._dim_name))
            # end if
            self._dim_name = dims[1]
        # end if
        # self._local_dim_name is the variable name for self._dim_name
        self._local_dim_name = None
        super().__init__(index_name, context, parent, run_env)
        if run_env.logger and run_env.logger.isEnabledFor(logging.DEBUG):
            lmsg = "Adding VerticalLoop for '{}'"
            run_env.logger.debug(lmsg.format(index_name))
        # end if
        # Add any items (accept a scalar, a list, or nothing)
        if not isinstance(items, list):
            if items is None:
                items = list()
            else:
                items = [items]
            # end if
        # end if
        for item in items:
            self.add_part(item)
        # end for

    def analyze(self, phase, group, scheme_library, suite_vars, level):
        """Analyze the VerticalLoop's interface to prepare for writing.
        Creates the loop-index variable, locates the loop-extent variable,
        and recursively analyzes all parts.
        Returns the set of (module, subroutine) scheme-use tuples."""
        # Handle all the suite objects inside of this loop
        scheme_mods = set()
        # Create a variable for the loop index
        newvar = Var({'local_name':self.name, 'standard_name':self.name,
                      'type':'integer', 'units':'count', 'dimensions':'()'},
                     _API_LOCAL, self.run_env)
        # The Group will manage this variable
        group.manage_variable(newvar)
        # Find the loop-extent variable
        dim_name = self._dim_name
        local_dim = group.find_variable(standard_name=dim_name, any_scope=False)
        if local_dim is None:
            local_dim = group.call_list.find_variable(standard_name=dim_name,
                                                      any_scope=False)
        # end if
        if local_dim is None:
            emsg = 'No variable found for vertical loop dimension {}'
            raise ParseInternalError(emsg.format(self._dim_name))
        # end if
        self._local_dim_name = local_dim.get_prop_value('local_name')
        emsg = "VerticalLoop local name for '{}'".format(self.name)
        # Fixed: closing quote was missing from the format string, which
        # produced an unbalanced quote in the debug message
        emsg += " is '{}'".format(self.dimension_name)
        if self.run_env.logger:
            self.run_env.logger.debug(emsg)
        # end if
        # Analyze our internal items
        for item in self.parts:
            smods = item.analyze(phase, group, scheme_library,
                                 suite_vars, level+1)
            for smod in smods:
                scheme_mods.add(smod)
            # end for
        # end for
        return scheme_mods

    def write(self, outfile, errcode, indent):
        """Write code for the vertical loop, including contents, to <outfile>"""
        outfile.write('do {} = 1, {}'.format(self.name, self.dimension_name),
                      indent)
        # Note that 'scheme' may be a subcycle or other construct
        for item in self.parts:
            item.write(outfile, errcode, indent+1)
        # end for
        # Fixed: 'end do' was written at hard-coded indent 2; use <indent>
        # so it lines up with the matching 'do' statement
        outfile.write('end do', indent)

    @property
    def dimension_name(self):
        """Return the local name of the extent variable over which this
        VerticalLoop loops (set during analyze)"""
        return self._local_dim_name
def dimension_name(self): + """Return the vertical dimension over which this VerticalLoop loops""" + return self._local_dim_name + +############################################################################### + +class Subcycle(SuiteObject): + """Class to represent a subcycled group of schemes or scheme collections""" + + def __init__(self, sub_xml, context, parent, run_env): + name = sub_xml.get('name', None) # Iteration count + loop_extent = sub_xml.get('loop', "1") # Number of iterations + # See if our loop variable is an interger or a variable + try: + loop_int = int(loop_extent) # pylint: disable=unused-variable + self._loop = loop_extent + self._loop_var_int = True + except ValueError: + self._loop_var_int = False + lvar = parent.find_variable(standard_name=self.loop, any_scope=True) + if lvar is None: + emsg = "Subcycle, {}, specifies {} iterations but {} not found" + raise CCPPError(emsg.format(name, self.loop, self.loop)) + # end if + parent.add_call_list_variable(lvar) + # end try + super().__init__(name, context, parent, run_env) + for item in sub_xml: + new_item = new_suite_object(item, context, self, run_env) + self.add_part(new_item) + # end for + + def analyze(self, phase, group, scheme_library, suite_vars, level): + """Analyze the Subcycle's interface to prepare for writing""" + if self.name is None: + self.name = "subcycle_index{}".format(level) + # end if + # Create a variable for the loop index + self.add_variable(Var({'local_name':self.name, + 'standard_name':'loop_variable', + 'type':'integer', 'units':'count', + 'dimensions':'()'}, _API_SOURCE, self.run_env), + self.run_env) + # Handle all the suite objects inside of this subcycle + scheme_mods = set() + for item in self.parts: + smods = item.analyze(phase, group, scheme_library, + suite_vars, level+1) + for smod in smods: + scheme_mods.add(smod) + # end for + # end for + return scheme_mods + + def write(self, outfile, errcode, indent): + """Write code for the subcycle loop, including 
contents, to <outfile>""" + outfile.write('do {} = 1, {}'.format(self.name, self.loop), indent) + # Note that 'scheme' may be a sybcycle or other construct + for item in self.parts: + item.write(outfile, errcode, indent+1) + # end for + outfile.write('end do', 2) + + @property + def loop(self): + """Return the loop value or variable local_name""" + lvar = self.find_variable(standard_name=self.loop, any_scope=True) + if lvar is None: + emsg = "Subcycle, {}, specifies {} iterations but {} not found" + raise CCPPError(emsg.format(self.name, self.loop, self.loop)) + # end if + lname = lvar.get_prop_value('local_name') + return lname + +############################################################################### + +class TimeSplit(SuiteObject): + """Class to represent a group of processes to be computed in a time-split + manner -- each parameterization or other construct is called with an + state which has been updated from the previous step. + """ + + def __init__(self, sub_xml, context, parent, run_env): + super().__init__('TimeSplit', context, parent, run_env) + for part in sub_xml: + new_item = new_suite_object(part, context, self, run_env) + self.add_part(new_item) + # end for + + def analyze(self, phase, group, scheme_library, suite_vars, level): + # Unused arguments are for consistent analyze interface + # pylint: disable=unused-argument + """Analyze the TimeSplit's interface to prepare for writing""" + # Handle all the suite objects inside of this group + scheme_mods = set() + for item in self.parts: + smods = item.analyze(phase, group, scheme_library, + suite_vars, level+1) + for smod in smods: + scheme_mods.add(smod) + # end for + # end for + return scheme_mods + + def write(self, outfile, errcode, indent): + """Write code for this TimeSplit section, including contents, + to <outfile>""" + for item in self.parts: + item.write(outfile, errcode, indent) + # end for + +############################################################################### + +class 
ProcessSplit(SuiteObject): + """Class to represent a group of processes to be computed in a + process-split manner -- all parameterizations or other constructs are + called with the same state. + NOTE: Currently a stub + """ + + def __init__(self, sub_xml, context, parent, run_env): + # Unused arguments are for consistent __init__ interface + # pylint: disable=unused-argument + super().__init__('ProcessSplit', context, parent, run_env) + raise CCPPError('ProcessSplit not yet implemented') + + def analyze(self, phase, group, scheme_library, suite_vars, level): + # Unused arguments are for consistent analyze interface + # pylint: disable=unused-argument + """Analyze the ProcessSplit's interface to prepare for writing""" + # Handle all the suite objects inside of this group + raise CCPPError('ProcessSplit not yet implemented') + + def write(self, outfile, errcode, indent): + """Write code for this ProcessSplit section, including contents, + to <outfile>""" + raise CCPPError('ProcessSplit not yet implemented') + +############################################################################### + +class Group(SuiteObject): + """Class to represent a grouping of schemes in a suite + A Group object is implemented as a subroutine callable by the API. + The main arguments to a group are the host model variables. + Additional output arguments are generated from schemes with intent(out) + arguments. + Additional input or inout arguments are generated for inputs needed by + schemes which are produced (intent(out)) by other groups. + """ + + __subhead = ''' + subroutine {subname}({args}) +''' + + __subend = ''' + end subroutine {subname} + +! 
class Group(SuiteObject):
    """Class to represent a grouping of schemes in a suite
    A Group object is implemented as a subroutine callable by the API.
    The main arguments to a group are the host model variables.
    Additional output arguments are generated from schemes with intent(out)
    arguments.
    Additional input or inout arguments are generated for inputs needed by
    schemes which are produced (intent(out)) by other groups.
    """

    # Fortran templates for the generated group subroutine header / footer
    __subhead = '''
   subroutine {subname}({args})
'''

    __subend = '''
   end subroutine {subname}

! ========================================================================
'''

    # Guard emitted for non-run phases: error out if called from inside an
    # OpenMP threaded region
    __thread_check = CodeBlock([('#ifdef _OPENMP', -1),
                                ('if (omp_get_thread_num() > 1) then', 1),
                                ('{errcode} = 1', 2),
                                (('{errmsg} = "Cannot call {phase} routine '
                                  'from a threaded region"'), 2),
                                ('return', 2),
                                ('end if', 1),
                                ('#endif', -1)])

    __process_types = [_API_TIMESPLIT_TAG, _API_PROCESSSPLIT_TAG]

    # Canned XML used to create a default (empty) process-split element
    __process_xml = {}
    for gptype in __process_types:
        __process_xml[gptype] = '<{ptype}></{ptype}>'.format(ptype=gptype)
    # end for

    def __init__(self, group_xml, transition, parent, context, run_env):
        """Initialize this Group object from <group_xml>.
        <transition> is the group's phase, <parent> is the group's suite.
        Raises ParseInternalError if <transition> is not a valid state
        machine transition."""
        name = parent.name + '_' + group_xml.get('name')
        if transition not in CCPP_STATE_MACH.transitions():
            errmsg = "Bad transition argument to Group, '{}'"
            raise ParseInternalError(errmsg.format(transition))
        # end if
        # Initialize the dictionary of variables internal to group
        super().__init__(name, context, parent, run_env,
                         active_call_list=True, phase_type=transition)
        # Add the items but first make sure we know the process type for
        # the group (e.g., TimeSplit or ProcessSplit).
        if (transition == RUN_PHASE_NAME) and ((not group_xml) or
                                               (group_xml[0].tag not in
                                                Group.__process_types)):
            # Default is TimeSplit
            tsxml = ET.fromstring(Group.__process_xml[_API_TIMESPLIT_TAG])
            time_split = new_suite_object(tsxml, context, self, run_env)
            add_to = time_split
            self.add_part(time_split)
        else:
            add_to = self
        # end if
        # Add the sub objects either directly to the Group or to the TimeSplit
        for item in group_xml:
            new_item = new_suite_object(item, context, add_to, run_env)
            add_to.add_part(new_item)
        # end for
        self._local_schemes = set()       # (module, subroutine) use pairs
        self._host_vars = None
        self._host_ddts = None
        self._loop_var_matches = list()   # VarLoopSubst actions registered
        self._phase_check_stmts = list()  # set during analyze()
        self._set_state = None            # set during analyze()
        self._ddt_library = None          # set during analyze()

    def phase_match(self, scheme_name):
        """If scheme_name matches the group phase, return the group and
        function ID. Otherwise, return None
        """
        fid, tid, _ = CCPP_STATE_MACH.transition_match(scheme_name,
                                                       transition=self.phase())
        if tid is not None:
            return self, fid
        # end if
        return None, None

    def move_to_call_list(self, standard_name):
        """Move a variable from the group internal dictionary to the call list.
        This is done when the variable, <standard_name>, will be allocated by
        the suite.
        """
        gvar = self.find_variable(standard_name=standard_name, any_scope=False)
        if gvar is None:
            errmsg = "Group {}, cannot move {}, variable not found"
            raise ParseInternalError(errmsg.format(self.name, standard_name))
        # end if
        self.add_call_list_variable(gvar, exists_ok=True)
        self.remove_variable(standard_name)

    def register_action(self, vaction):
        """Register any recognized <vaction> type for use during self.write.
        Return True iff <vaction> is handled.
        Currently only VarLoopSubst actions are recognized."""
        if isinstance(vaction, VarLoopSubst):
            self._loop_var_matches = vaction.add_to_list(self._loop_var_matches)
            # Add the missing dim
            vaction.add_local(self, _API_LOCAL, self.run_env)
            return True
        # end if
        return False

    def manage_variable(self, newvar):
        """Add <newvar> to our local dictionary making necessary
        modifications to the variable properties so that it is
        allocated appropriately"""
        # Need new prop dict to eliminate unwanted properties (e.g., intent)
        vdims = newvar.get_dimensions()
        # Look for dimensions where we have a loop substitution and replace
        # with the correct size
        if self.run_phase():
            hdims = [x.missing_stdname for x in self._loop_var_matches]
        else:
            # Do not do loop substitutions in full phases
            hdims = list()
        # end if
        for index, dim in enumerate(vdims):
            newdim = None
            for subdim in dim.split(':'):
                if subdim in hdims:
                    # We have a loop substitution, find and replace
                    hindex = hdims.index(subdim)
                    names = self._loop_var_matches[hindex].required_stdnames
                    newdim = ':'.join(names)
                    break
                # end if
                if ('vertical' in subdim) and ('index' in subdim):
                    # We have a vertical index, replace with correct dimension
                    errmsg = "vertical index replace not implemented"
                    raise ParseInternalError(errmsg)
                # end if
            # end for
            if newdim is not None:
                vdims[index] = newdim
            # end if
        # end for
        if self.timestep_phase():
            persist = 'timestep'
        else:
            persist = 'run'
        # end if
        # Start with an official copy of <newvar>'s prop_dict with
        # corrected dimensions
        subst_dict = {'dimensions':vdims}
        prop_dict = newvar.copy_prop_dict(subst_dict=subst_dict)
        # Add the allocatable items
        prop_dict['allocatable'] = len(vdims) > 0 # No need to allocate scalar
        prop_dict['persistence'] = persist
        # This is a local variable
        if 'intent' in prop_dict:
            del prop_dict['intent']
        # end if
        # Create a new variable, save the original context
        local_var = Var(prop_dict,
                        ParseSource(_API_SOURCE_NAME,
                                    _API_LOCAL_VAR_NAME, newvar.context),
                        self.run_env)
        self.add_variable(local_var, self.run_env, exists_ok=True)
        # Finally, make sure all dimensions are accounted for
        emsg = self.add_variable_dimensions(local_var, _API_LOCAL_VAR_TYPES,
                                            adjust_intent=True,
                                            to_dict=self.call_list)
        if emsg:
            raise CCPPError(emsg)
        # end if

    def analyze(self, phase, suite_vars, scheme_library, ddt_library,
                check_suite_state, set_suite_state):
        """Analyze the Group's interface to prepare for writing.
        <check_suite_state> and <set_suite_state> are CodeBlock-like objects
        stored here and emitted during write()."""
        self._ddt_library = ddt_library
        # Sanity check for Group
        if phase != self.phase():
            errmsg = 'Group {} has phase {} but analyze is phase {}'
            raise ParseInternalError(errmsg.format(self.name,
                                                   self.phase(), phase))
        # end if
        for item in self.parts:
            # Items can be schemes, subcycles or other objects
            # All have the same interface and return a set of module use
            # statements (lschemes)
            lschemes = item.analyze(phase, self, scheme_library, suite_vars, 1)
            for lscheme in lschemes:
                self._local_schemes.add(lscheme)
            # end for
        # end for
        self._phase_check_stmts = check_suite_state
        self._set_state = set_suite_state
        if (self.run_env.logger and
            self.run_env.logger.isEnabledFor(logging.DEBUG)):
            self.run_env.logger.debug("{}".format(self))
        # end if

    def allocate_dim_str(self, dims, context):
        """Create the dimension string for an allocate statement.
        Each entry of <dims> is a (possibly ranged) standard name; the
        result uses the corresponding local names."""
        rdims = list()
        for dim in dims:
            rdparts = list()
            dparts = dim.split(':')
            for dpart in dparts:
                dvar = self.find_variable(standard_name=dpart, any_scope=False)
                if dvar is None:
                    dvar = self.call_list.find_variable(standard_name=dpart,
                                                        any_scope=False)
                if dvar is None:
                    emsg = "Dimension variable, '{}', not found{}"
                    lvar = self.find_local_name(dpart, any_scope=True)
                    if lvar is not None:
                        emsg += "\nBe sure to use standard names!"
                    # end if
                    ctx = context_string(context)
                    raise CCPPError(emsg.format(dpart, ctx))
                # end if
                lname = dvar.get_prop_value('local_name')
                rdparts.append(lname)
            # end for
            rdims.append(':'.join(rdparts))
        # end for
        return ', '.join(rdims)

    def find_variable(self, standard_name=None, source_var=None,
                      any_scope=True, clone=None,
                      search_call_list=False, loop_subst=False):
        """Find a matching variable to <var>, create a local clone (if
        <clone> is True), or return None.
        This purpose of this special Group version is to record any constituent
        variable found for processing during the write phase.
        """
        fvar = super().find_variable(standard_name=standard_name,
                                     source_var=source_var,
                                     any_scope=any_scope, clone=clone,
                                     search_call_list=search_call_list,
                                     loop_subst=loop_subst)
        if fvar and fvar.is_constituent():
            if fvar.source.type == ConstituentVarDict.constitutent_source_type():
                # We found this variable in the constituent dictionary,
                # add it to our call list
                self.add_call_list_variable(fvar, exists_ok=True)
            # end if
        # end if
        return fvar

    def write(self, outfile, host_arglist, indent, const_mod,
              suite_vars=None, allocate=False, deallocate=False):
        """Write code for this subroutine (Group), including contents,
        to <outfile>"""
        # Unused arguments are for consistent write interface
        # pylint: disable=unused-argument
        # group type for (de)allocation
        if self.timestep_phase():
            group_type = 'timestep' # Just allocate for the timestep
        else:
            group_type = 'run' # Allocate for entire run
        # end if
        # Collect information on local variables
        subpart_vars = {}
        allocatable_var_set = set()
        # NOTE(review): the parts' declarations are excluded here (the
        # '+ self.parts' term is commented out) -- confirm this is intended
        for item in [self]:# + self.parts:
            for var in item.declarations():
                lname = var.get_prop_value('local_name')
                if lname in subpart_vars:
                    if subpart_vars[lname][0].compatible(var, self.run_env):
                        pass # We already are going to declare this variable
                    else:
                        errmsg = "Duplicate Group variable, {}"
                        raise ParseInternalError(errmsg.format(lname))
                    # end if
                else:
                    subpart_vars[lname] = (var, item)
                    dims = var.get_dimensions()
                    if (dims is not None) and dims:
                        allocatable_var_set.add(lname)
                    # end if
                # end if
            # end for
        # end for
        # First, write out the subroutine header
        subname = self.name
        call_list = self.call_list.call_string()
        outfile.write(Group.__subhead.format(subname=subname, args=call_list),
                      indent)
        # Write out any use statements
        if self._local_schemes:
            modmax = max([len(s[0]) for s in self._local_schemes])
        else:
            modmax = 0
        # end if
        # Write out the scheme use statements
        scheme_use = 'use {},{} only: {}'
        for scheme in self._local_schemes:
            smod = scheme[0]
            sname = scheme[1]
            slen = ' '*(modmax - len(smod))
            outfile.write(scheme_use.format(smod, slen, sname), indent+1)
        # end for
        # Look for any DDT types
        call_vars = self.call_list.variable_list()
        self._ddt_library.write_ddt_use_statements(call_vars, outfile,
                                                   indent+1, pad=modmax)
        decl_vars = [x[0] for x in subpart_vars.values()]
        self._ddt_library.write_ddt_use_statements(decl_vars, outfile,
                                                   indent+1, pad=modmax)
        outfile.write('', 0)
        # Write out dummy arguments
        outfile.write('! Dummy arguments', indent+1)
        msg = 'Variables for {}: ({})'
        if (self.run_env.logger and
            self.run_env.logger.isEnabledFor(logging.DEBUG)):
            self.run_env.logger.debug(msg.format(self.name, call_vars))
        # end if
        self.call_list.declare_variables(outfile, indent+1, dummy=True)
        if subpart_vars:
            outfile.write('\n! Local Variables', indent+1)
        # Write out local variables
        for key in subpart_vars:
            var = subpart_vars[key][0]
            spdict = subpart_vars[key][1]
            var.write_def(outfile, indent+1, spdict,
                          allocatable=(key in allocatable_var_set))
        # end for
        outfile.write('', 0)
        # Get error variable names
        if self.run_env.use_error_obj:
            raise ParseInternalError("Error object not supported")
        else:
            verrcode = self.call_list.find_variable(standard_name='ccpp_error_code')
            if verrcode is not None:
                errcode = verrcode.get_prop_value('local_name')
            else:
                errmsg = "No ccpp_error_code variable for group, {}"
                raise CCPPError(errmsg.format(self.name))
            # end if
            verrmsg = self.call_list.find_variable(standard_name='ccpp_error_message')
            if verrmsg is not None:
                errmsg = verrmsg.get_prop_value('local_name')
            else:
                errmsg = "No ccpp_error_message variable for group, {}"
                raise CCPPError(errmsg.format(self.name))
            # end if
            # Initialize error variables
            # NOTE(review): indent is hard-coded to 2 here rather than
            # using <indent> -- confirm this matches the generated layout
            outfile.write("{} = 0".format(errcode), 2)
            outfile.write("{} = ''".format(errmsg), 2)
        # end if
        # Output threaded region check (except for run phase)
        if not self.run_phase():
            Group.__thread_check.write(outfile, indent,
                                       {'phase' : self.phase(),
                                        'errcode' : errcode,
                                        'errmsg' : errmsg})
        # Check state machine
        self._phase_check_stmts.write(outfile, indent,
                                      {'errcode' : errcode, 'errmsg' : errmsg,
                                       'funcname' : self.name})
        # Allocate local arrays
        alloc_stmt = "allocate({}({}))"
        for lname in allocatable_var_set:
            var = subpart_vars[lname][0]
            dims = var.get_dimensions()
            alloc_str = self.allocate_dim_str(dims, var.context)
            outfile.write(alloc_stmt.format(lname, alloc_str), indent+1)
        # end for
        # Allocate suite vars
        if allocate:
            for svar in suite_vars.variable_list():
                dims = svar.get_dimensions()
                if dims:
                    timestep_var = svar.get_prop_value('persistence')
                    if group_type == timestep_var:
                        alloc_str = self.allocate_dim_str(dims, svar.context)
                        lname = svar.get_prop_value('local_name')
                        outfile.write(alloc_stmt.format(lname, alloc_str),
                                      indent+1)
                    # end if (do not allocate in this phase)
                # end if dims (do not allocate scalars)
            # end for
        # end if
        # Write any loop match calculations
        for vmatch in self._loop_var_matches:
            action = vmatch.write_action(self, dict2=self.call_list)
            if action:
                outfile.write(action, indent+1)
            # end if
        # end for
        # Write the scheme and subcycle calls
        for item in self.parts:
            item.write(outfile, errcode, indent + 1)
        # end for
        # Deallocate local arrays
        for lname in allocatable_var_set:
            outfile.write('deallocate({})'.format(lname), indent+1)
        # end for
        # Deallocate suite vars
        if deallocate:
            for svar in suite_vars.variable_list():
                dims = svar.get_dimensions()
                if dims:
                    timestep_var = svar.get_prop_value('persistence')
                    if group_type == timestep_var:
                        lname = svar.get_prop_value('local_name')
                        outfile.write('deallocate({})'.format(lname), indent+1)
                    # end if
                # end if (no else, do not deallocate scalars)
            # end for
        # end if
        # Record the new suite state, then close out the subroutine
        self._set_state.write(outfile, indent, {})
        outfile.write(Group.__subend.format(subname=subname), indent)

    @property
    def suite(self):
        """Return this Group's suite"""
        return self.parent

    def suite_dicts(self):
        """Return a list of this Group's Suite's dictionaries"""
        return self.suite.suite_dicts()

###############################################################################

if __name__ == "__main__":
    # First, run doctest
    import doctest
    doctest.testmod()
# end if (no else)
_REAL_SUBST_RE = re.compile(r"(.*\d)p(\d.*)")
_HDIM_TEMPNAME = '_CCPP_HORIZ_DIM'

###############################################################################
# Supported horizontal dimensions (should be defined in CCPP_STANDARD_VARS)
CCPP_HORIZONTAL_DIMENSIONS = ['ccpp_constant_one:horizontal_dimension',
                              'ccpp_constant_one:horizontal_loop_extent',
                              'horizontal_loop_begin:horizontal_loop_end',
                              'horizontal_dimension', 'horizontal_loop_extent']

###############################################################################
# Supported vertical dimensions (should be defined in CCPP_STANDARD_VARS)
CCPP_VERTICAL_DIMENSIONS = ['ccpp_constant_one:vertical_layer_dimension',
                            'ccpp_constant_one:vertical_interface_dimension',
                            'vertical_layer_dimension',
                            'vertical_interface_dimension',
                            'vertical_layer_index', 'vertical_interface_index']

###############################################################################
# Substitutions for run time dimension control
CCPP_LOOP_DIM_SUBSTS = {'ccpp_constant_one:horizontal_dimension' :
                        'horizontal_loop_begin:horizontal_loop_end',
                        'ccpp_constant_one:vertical_layer_dimension' :
                        'vertical_layer_index',
                        'ccpp_constant_one:vertical_interface_dimension' :
                        'vertical_interface_index'}

########################################################################
def is_horizontal_dimension(dim_name):
########################################################################
    """Test whether <dim_name> is one of the recognized horizontal
    dimension (or index) names; return True if so, False otherwise.
    >>> is_horizontal_dimension('horizontal_loop_extent')
    True
    >>> is_horizontal_dimension('horizontal_loop_begin:horizontal_loop_end')
    True
    >>> is_horizontal_dimension('horizontal_loop_begin:horizontal_loop_extent')
    False
    >>> is_horizontal_dimension('ccpp_constant_one')
    False
    """
    return dim_name in CCPP_HORIZONTAL_DIMENSIONS

########################################################################
def is_vertical_dimension(dim_name):
########################################################################
    """Test whether <dim_name> is one of the recognized vertical
    dimension (or index) names; return True if so, False otherwise.
    >>> is_vertical_dimension('vertical_layer_index')
    True
    >>> is_vertical_dimension('ccpp_constant_one:vertical_layer_index')
    False
    >>> is_vertical_dimension('horizontal_loop_extent')
    False
    """
    return dim_name in CCPP_VERTICAL_DIMENSIONS

########################################################################
def find_horizontal_dimension(dims):
########################################################################
    """Locate the first horizontal dimension in the <dims> list.
    Return a (horizontal_dimension, index) tuple where index is its
    position in <dims>, or (None, -1) when no entry qualifies."""
    for position, entry in enumerate(dims):
        if is_horizontal_dimension(entry):
            return (entry, position)
        # end if
    # end for
    return (None, -1)

########################################################################
def find_vertical_dimension(dims):
########################################################################
    """Locate the first vertical dimension in the <dims> list.
    Return a (vertical_dimension, index) tuple where index is its
    position in <dims>, or (None, -1) when no entry qualifies."""
    for position, entry in enumerate(dims):
        if is_vertical_dimension(entry):
            return (entry, position)
        # end if
    # end for
    return (None, -1)
context=ParseContext(linenum=3, filename='foo.F90')) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: No standard name to convert to long name at foo.F90:3 + """ + # We assume that standard_name has been checked for validity + # Make the first char uppercase and replace each underscore with a space + if 'standard_name' in prop_dict: + standard_name = prop_dict['standard_name'] + if standard_name: + long_name = standard_name[0].upper() + re.sub("_", " ", + standard_name[1:]) + else: + long_name = '' + # end if + # Next, substitute a decimal point for the p in [:digit]p[:digit] + match = _REAL_SUBST_RE.match(long_name) + while match is not None: + long_name = match.group(1) + '.' + match.group(2) + match = _REAL_SUBST_RE.match(long_name) + # end while + else: + long_name = '' + if 'local_name' in prop_dict: + lname = ' {}'.format(prop_dict['local_name']) + else: + lname = '' + # end if + ctxt = context_string(context) + emsg = 'No standard name to convert{} to long name{}' + raise CCPPError(emsg.format(lname, ctxt)) + # end if + return long_name + +######################################################################## +def default_kind_val(prop_dict, context=None): +######################################################################## + """Choose a default kind based on a variable's type + >>> default_kind_val({'type':'REAL'}) + 'kind_phys' + >>> default_kind_val({'type':'complex'}) + 'kind_phys' + >>> default_kind_val({'type':'double precision'}) + 'kind_phys' + >>> default_kind_val({'type':'integer'}) + '' + >>> default_kind_val({'type':'character'}) + '' + >>> default_kind_val({'type':'logical'}) + '' + >>> default_kind_val({'local_name':'foo'}) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: No type to find default kind for foo + >>> default_kind_val({}) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: No type to find default kind + >>> 
default_kind_val({'local_name':'foo'}, context=ParseContext(linenum=3, filename='foo.F90')) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: No type to find default kind for foo at foo.F90:3 + >>> default_kind_val({}, context=ParseContext(linenum=3, filename='foo.F90')) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: No type to find default kind at foo.F90:3 + """ + if 'type' in prop_dict: + vtype = prop_dict['type'].lower() + if vtype == 'real': + kind = 'kind_phys' + elif vtype == 'complex': + kind = 'kind_phys' + elif FORTRAN_DP_RE.match(vtype) is not None: + kind = 'kind_phys' + else: + kind = '' + # end if + else: + kind = '' + if 'local_name' in prop_dict: + lname = ' {}'.format(prop_dict['local_name']) + errmsg = 'No type to find default kind for {ln}{ct}' + else: + lname = '' + errmsg = 'No type to find default kind{ct}' + # end if + ctxt = context_string(context) + raise CCPPError(errmsg.format(ln=lname, ct=ctxt)) + # end if + return kind + +######################################################################## + +class DimTransform: + """Class to represent a transformation between two variables with + compatible dimensions. + Compatible differences include permutations, sub-selection of the + horizontal dimension, and the ordering of the vertical dimension. + + The "forward" transformation transforms "var1" into "var2" + (i.e., var2 = forward_transform(var1)). + The "reverse" transformation transforms "var2" into "var1" + (i.e., var1 = reverse_transform(var2)). + """ + + def __init__(self, forward_permutation, reverse_permutation, + forward_hdim, forward_hdim_index, forward_vdim_index, + reverse_hdim, reverse_hdim_index, reverse_vdim_index): + """Initialize a dimension transform object. + <forward_permutation>: A tuple of integers with the location of the + "var1" index for each "var2" index. 
That is, the first index + for "var2" on the LHS of the forward transform is + <forward_permutation>[0]. + <reverse_permutation>: A tuple of integers with the location of the + "var2" index for each "var1" index. That is, the first index + for "var1" on the LHS of the forward transform is + <reverse_permutation>[0]. + <forward_hdim>: The name of the horizontal dimension for "var1". + This is used to determine if an offset needs to be added to + the forward and reverse transforms. + <forward_hdim_index>: This is the position of the horizontal dimension + for "var1". For instance, zero means that the horizontal axis is + the fastest varying. + <forward_vdim_index>: This is the position of the vertical dimension + for "var1". For instance, zero means that the vertical axis is + the fastest varying. + <reverse_hdim>: The name of the horizontal dimension for "var2". + This is used to determine if an offset needs to be added to + the forward and reverse transforms. + <reverse_hdim_index>: This is the position of the horizontal dimension + for "var2". For instance, zero means that the horizontal axis is + the fastest varying. + <reverse_vdim_index>: This is the position of the vertical dimension + for "var2". For instance, zero means that the vertical axis is + the fastest varying. 
+ + # Test that bad inputs are trapped: + >>> DimTransform((0, 1, 2), (2, 1), 'horizontal_dimension', 0, 1, \ + 'horizontal_dimension', \ + 1, 0) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + parse_source.ParseInternalError: Permutation mismatch, '(0, 1, 2)' and '(2, 1)' + >>> DimTransform((2, 0, 1), (1, 2, 0), 'horizontal_dimension', 3, 4, \ + 'horizontal_dimension', \ + 4, 3) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + parse_source.ParseInternalError: forward_hdim_index (3) out of range [0, 2] + >>> DimTransform((2, 0, 1), (1, 2, 0), 'horizontal_dimension', 0, 4, \ + 'horizontal_dimension', \ + 4, 3) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + parse_source.ParseInternalError: forward_vdim_index (4) out of range [0, 2 + >>> DimTransform((2, 0, 1), (1, 2, 0), 'horizontal_dimension', 0, 2, \ + 'horizontal_dimension', \ + 4, 3) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + parse_source.ParseInternalError: reverse_hdim_index (4) out of range [0, 2] + >>> DimTransform((2, 0, 1), (1, 2, 0), 'horizontal_dimension', 3, 4, \ + 'horizontal_dimension', \ + 0, 3) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + parse_source.ParseInternalError: forward_hdim_index (3) out of range [0, 2] + """ + # Store inputs + if len(forward_permutation) != len(reverse_permutation): + emsg = "Permutation mismatch, '{}' and '{}'" + raise ParseInternalError(emsg.format(forward_permutation, + reverse_permutation)) + # end if + self.__forward_perm = forward_permutation + self.__reverse_perm = reverse_permutation + if ((forward_hdim_index < 0) or + (forward_hdim_index >= len(forward_permutation))): + emsg = "forward_hdim_index ({}) out of range [0, {}]" + raise ParseInternalError(emsg.format(forward_hdim_index, + len(forward_permutation)-1)) + # end if + self.__forward_hdim_index = forward_hdim_index + # We cannot test for negative forward_vdim_index because 
there may + # not be a vertical dimension + if forward_vdim_index >= len(forward_permutation): + emsg = "forward_vdim_index ({}) out of range [0, {}]" + raise ParseInternalError(emsg.format(forward_vdim_index, + len(forward_permutation)-1)) + # end if + self.__forward_vdim_index = forward_vdim_index + if ((reverse_hdim_index < 0) or + (reverse_hdim_index >= len(reverse_permutation))): + emsg = "reverse_hdim_index ({}) out of range [0, {}]" + raise ParseInternalError(emsg.format(reverse_hdim_index, + len(reverse_permutation)-1)) + # end if + self.__reverse_hdim_index = reverse_hdim_index + # We cannot test for negative reverse_vdim_index because there may + # not be a vertical dimension + if reverse_vdim_index >= len(reverse_permutation): + emsg = "reverse_vdim_index ({}) out of range [0, {}]" + raise ParseInternalError(emsg.format(reverse_vdim_index, + len(reverse_permutation)-1)) + # end if + self.__reverse_vdim_index = reverse_vdim_index + # Categorize horizontal dimensions + # v<x>_hloop is True if "var<x>" has extent "horizontal_loop_extent". + # The loop for these variables begins at one while variables with + # extent, "horizontal_dimension" begin at "horizontal_loop_begin" + # during the run phase. + self.__v1_hloop = self.__is_horizontal_loop_dimension(forward_hdim) + if ((not self.__v1_hloop) and + (not ("horizontal_dimension" in forward_hdim))): + emsg = "Uncategorized forward horizontal dimension, '{}'" + raise ParseInternalError(emsg.format(forward_hdim)) + # end if + self.__v2_hloop = self.__is_horizontal_loop_dimension(reverse_hdim) + if ((not self.__v2_hloop) and + (not ("horizontal_dimension" in reverse_hdim))): + emsg = "Uncategorized reverse horizontal dimension, '{}'" + raise ParseInternalError(emsg.format(reverse_hdim)) + # end if + + def forward_transform(self, var2_lname, indices, + adjust_hdim=None, flip_vdim=None): + """Compute and return the LHS of the forward transform from "var1" to + "var2". + <var2_lname> is the local name of "var2". 
+ <indices> is a tuple of the loop indices for "var1" (i.e., "var1" + will show up in the RHS of the transform as "var1(indices)". + If <adjust_hdim> is not None, it should be a string containing the + local name of the "horizontal_loop_begin" variable. This is used to + compute the offset in the horizontal axis index between one and + "horizontal_loop_begin" (if any). This occurs when one of the + variables has extent "horizontal_loop_extent" and the other has + extent "horizontal_dimension". + If flip_vdim is not None, it should be a string containing the local + name of the vertical extent of the vertical axis for "var1" and + "var2" (i.e., "vertical_layer_dimension" or + "vertical_interface_dimension"). + + # Test forward transform with just horizontal adjustment + >>> DimTransform((0, 1), (0, 1), 'horizontal_dimension', 0, 1, \ + 'horizontal_loop_extent', \ + 0, 1).forward_transform("foo_lhs", ("hind", "vind"), \ + adjust_hdim="col_start") + 'foo_lhs(hind-col_start+1,vind)' + >>> DimTransform((0, 1), (0, 1), 'horizontal_loop_extent', 0, 1, \ + 'horizontal_dimension', \ + 0, 1).forward_transform("foo_lhs", ("hind", "vind"), \ + adjust_hdim="col_start") + 'foo_lhs(hind+col_start-1,vind)' + + # Test flipping vertical dimension + >>> DimTransform((0, 1), (0, 1), 'horizontal_dimension', 0, 1, \ + 'horizontal_dimension', \ + 0, 1).forward_transform("foo_lhs", ("hind", "vind"), \ + flip_vdim="pver") + 'foo_lhs(hind,pver-vind+1)' + + # Test simple permutations + >>> DimTransform((1, 0), (1, 0), 'horizontal_dimension', 0, 1, \ + 'horizontal_dimension', \ + 1, 0).forward_transform("foo_lhs", ("hind", "vind")) + 'foo_lhs(vind,hind)' + >>> DimTransform((2, 0, 1), (1, 2, 0), 'horizontal_dimension', 0, 2, \ + 'horizontal_dimension', \ + 0, 1).forward_transform("foo_lhs", \ + ("hind", "xdim", "vind")) + 'foo_lhs(vind,hind,xdim)' + """ + v2_indices = [indices[x] for x in self.__forward_perm] + if adjust_hdim is not None: + if self.__v1_hloop and (not self.__v2_hloop): + hdim 
= v2_indices[self.__forward_hdim_index] + adj_str = f"{hdim}+{adjust_hdim}-1" + v2_indices[self.__forward_hdim_index] = adj_str + elif self.__v2_hloop and (not self.__v1_hloop): + hdim = v2_indices[self.__forward_hdim_index] + adj_str = f"{hdim}-{adjust_hdim}+1" + v2_indices[self.__forward_hdim_index] = adj_str + # end if + # end if + if flip_vdim is not None: + vdim = v2_indices[self.__forward_vdim_index] + adj_str = f"{flip_vdim}-{vdim}+1" + v2_indices[self.__forward_vdim_index] = adj_str + # end if + return f"{var2_lname}({','.join(v2_indices)})" + + def reverse_transform(self, var1_lname, indices, + adjust_hdim=None, flip_vdim=None): + """Compute and return the LHS of the forward transform from "var2" to + "var1". + <var1_lname> is the local name of "var1". + <indices> is a tuple of the loop indices for "var2" (i.e., "var2" + will show up in the RHS of the transform as "var2(indices)". + If <adjust_hdim> is not None, it should be a string containing the + local name of the "horizontal_loop_begin" variable. This is used to + compute the offset in the horizontal axis index between one and + "horizontal_loop_begin" (if any). This occurs when one of the + variables has extent "horizontal_loop_extent" and the other has + extent "horizontal_dimension". + If flip_vdim is not None, it should be a string containing the local + name of the vertical extent of the vertical axis for "var2" and + "var1" (i.e., "vertical_layer_dimension" or + "vertical_interface_dimension"). 
+ + # Test reverse transform with just horizontal adjustment + >>> DimTransform((0, 1), (0, 1), 'horizontal_dimension', 0, 1, \ + 'horizontal_loop_extent', \ + 0, 1).reverse_transform("bar_lhs", ("hind", "vind"), \ + adjust_hdim="col_start") + 'bar_lhs(hind+col_start-1,vind)' + >>> DimTransform((0, 1), (0, 1), 'horizontal_loop_extent', 0, 1, \ + 'horizontal_dimension', \ + 0, 1).reverse_transform("bar_lhs", ("hind", "vind"), \ + adjust_hdim="col_start") + 'bar_lhs(hind-col_start+1,vind)' + + # Test flipping vertical dimension + >>> DimTransform((0, 1), (0, 1), 'horizontal_dimension', 0, 1, \ + 'horizontal_dimension', \ + 0, 1).reverse_transform("bar_lhs", ("hind", "vind"), \ + flip_vdim="pver") + 'bar_lhs(hind,pver-vind+1)' + + # Test simple permutations + >>> DimTransform((1, 0), (1, 0), 'horizontal_dimension', 0, 1, \ + 'horizontal_dimension', \ + 1, 0).reverse_transform("bar_lhs", ("hind", "vind")) + 'bar_lhs(vind,hind)' + >>> DimTransform((2, 0, 1), (1, 2, 0), 'horizontal_dimension', 0, 2, \ + 'horizontal_dimension', \ + 0, 1).reverse_transform("bar_lhs", \ + ("vind", "hind", "xdim")) + 'bar_lhs(hind,xdim,vind)' + """ + v1_indices = [indices[x] for x in self.__reverse_perm] + if adjust_hdim is not None: + if self.__v1_hloop and (not self.__v2_hloop): + hdim = v1_indices[self.__reverse_hdim_index] + adj_str = f"{hdim}-{adjust_hdim}+1" + v1_indices[self.__reverse_hdim_index] = adj_str + elif self.__v2_hloop and (not self.__v1_hloop): + hdim = v1_indices[self.__reverse_hdim_index] + adj_str = f"{hdim}+{adjust_hdim}-1" + v1_indices[self.__reverse_hdim_index] = adj_str + # end if + # end if + if flip_vdim is not None: + vdim = v1_indices[self.__reverse_vdim_index] + adj_str = f"{flip_vdim}-{vdim}+1" + v1_indices[self.__reverse_vdim_index] = adj_str + # end if + return f"{var1_lname}({','.join(v1_indices)})" + + @staticmethod + def __is_horizontal_loop_dimension(hdim): + """Return True if <hdim> is a run-phase horizontal dimension""" + return 
(is_horizontal_dimension(hdim) and + ("horizontal_dimension" not in hdim)) + +######################################################################## + +class VariableProperty: + """Class to represent a single property of a metadata header entry + >>> VariableProperty('local_name', str) #doctest: +ELLIPSIS + <__main__.VariableProperty object at ...> + >>> VariableProperty('standard_name', str) #doctest: +ELLIPSIS + <__main__.VariableProperty object at ...> + >>> VariableProperty('long_name', str) #doctest: +ELLIPSIS + <__main__.VariableProperty object at ...> + >>> VariableProperty('units', str) #doctest: +ELLIPSIS + <__main__.VariableProperty object at ...> + >>> VariableProperty('dimensions', list) #doctest: +ELLIPSIS + <__main__.VariableProperty object at ...> + >>> VariableProperty('type', str) #doctest: +ELLIPSIS + <__main__.VariableProperty object at ...> + >>> VariableProperty('kind', str) #doctest: +ELLIPSIS + <__main__.VariableProperty object at ...> + >>> VariableProperty('state_variable', str, valid_values_in=['True', 'False', '.true.', '.false.' ], optional_in=True, default_in=False) #doctest: +ELLIPSIS + <__main__.VariableProperty object at ...> + >>> VariableProperty('intent', str, valid_values_in=['in', 'out', 'inout']) #doctest: +ELLIPSIS + <__main__.VariableProperty object at ...> + >>> VariableProperty('optional', str, valid_values_in=['True', 'False', '.true.', '.false.' 
], optional_in=True, default_in=False) #doctest: +ELLIPSIS + <__main__.VariableProperty object at ...> + >>> VariableProperty('local_name', str).name + 'local_name' + >>> VariableProperty('standard_name', str).type == str + True + >>> VariableProperty('units', str).is_match('units') + True + >>> VariableProperty('units', str).is_match('UNITS') + True + >>> VariableProperty('units', str).is_match('type') + False + >>> VariableProperty('value', int, valid_values_in=[1, 2 ]).valid_value('2') + 2 + >>> VariableProperty('value', int, valid_values_in=[1, 2 ]).valid_value('3') + + >>> VariableProperty('value', int, valid_values_in=[1, 2 ]).valid_value('3', error=True) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: Invalid value variable property, '3' + >>> VariableProperty('units', str, check_fn_in=check_units).valid_value('m s-1') + 'm s-1' + >>> VariableProperty('units', str, check_fn_in=check_units).valid_value(' ') + + >>> VariableProperty('units', str, check_fn_in=check_units).valid_value(' ', error=True) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: ' ' is not a valid unit + >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value('()') + [] + >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value('(x)') + ['x'] + >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value('x') + + >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value('(x:y)') + ['x:y'] + >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value('(w:x,y:z)') + ['w:x', 'y:z'] + >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value(['size(foo)']) + ['size(foo)'] + >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value('(w:x,x:y:z:q)', error=True) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: 'x:y:z:q' 
is an invalid dimension range + >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value('(x:3y)', error=True) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: '3y' is not a valid Fortran identifier + >>> VariableProperty('local_name', str, check_fn_in=check_local_name).valid_value('foo') + 'foo' + >>> VariableProperty('local_name', str, check_fn_in=check_local_name).valid_value('foo(bar)') + 'foo(bar)' + >>> VariableProperty('local_name', str, check_fn_in=check_local_name).valid_value('q(:,:,index_of_water_vapor_specific_humidity)') + 'q(:,:,index_of_water_vapor_specific_humidity)' + """ + + __true_vals = ['t', 'true', '.true.'] + __false_vals = ['f', 'false', '.false.'] + + def __init__(self, name_in, type_in, valid_values_in=None, + optional_in=False, default_in=None, default_fn_in=None, + check_fn_in=None, mult_entry_ok=False): + """Conduct sanity checks and initialize this variable property.""" + self._name = name_in + self._type = type_in + if self._type not in [bool, int, list, str]: + emsg = "{} has invalid VariableProperty type, '{}'" + raise CCPPError(emsg.format(name_in, type_in)) + # end if + self._valid_values = valid_values_in + self._optional = optional_in + self._default = None + self._default_fn = None + if self.optional: + if (default_in is None) and (default_fn_in is None): + emsg = 'default_in or default_fn_in is a required property for {} because it is optional' + raise CCPPError(emsg.format(name_in)) + if (default_in is not None) and (default_fn_in is not None): + emsg = 'default_in and default_fn_in cannot both be provided' + raise CCPPError(emsg) + self._default = default_in + self._default_fn = default_fn_in + elif default_in is not None: + emsg = 'default_in is not a valid property for {} because it is not optional' + raise CCPPError(emsg.format(name_in)) + elif default_fn_in is not None: + emsg = 'default_fn_in is not a valid property for {} because it is not optional' + raise 
CCPPError(emsg.format(name_in)) + self._check_fn = check_fn_in + self._add_multiple_ok = mult_entry_ok + + @property + def name(self): + """Return the name of the property""" + return self._name + + @property + def type(self): + """Return the type of the property""" + return self._type + + @property + def has_default_func(self): + """Return True iff this variable property has a default function""" + return self._default_fn is not None + + def get_default_val(self, prop_dict, context=None): + """Return this variable property's default value or raise an + exception if there is no default value or default value function.""" + if self.has_default_func: + return self._default_fn(prop_dict, context) + # end if + if self._default is not None: + return self._default + # end if + ctxt = context_string(context) + emsg = 'No default for variable property {}{}' + raise CCPPError(emsg.format(self.name, ctxt)) + + + @property + def optional(self): + """Return True iff this variable property is optional""" + return self._optional + + @property + def add_multiple(self): + """Return True iff multiple entries of this property should be + accumulated. If False, it should either be an error or new + instances should replace the old, however, this functionality + must be implemented by the calling routine (e.g., Var)""" + return self._add_multiple_ok + + def is_match(self, test_name): + """Return True iff <test_name> is the name of this property""" + return self.name.lower() == test_name.lower() + + def valid_value(self, test_value, prop_dict=None, error=False): + """Return a valid version of <test_value> if it is valid. + If <test_value> is not valid, return None or raise an exception, + depending on the value of <error>. + If <prop_dict> is not None, it may be used in value validation. 
+ """ + valid_val = None + if self.type is int: + try: + tval = int(test_value) + if self._valid_values is not None: + if tval in self._valid_values: + valid_val = tval + else: + valid_val = None # i.e. pass + else: + valid_val = tval + except CCPPError: + valid_val = None # Redundant but more expressive than pass + elif self.type is list: + if isinstance(test_value, str): + tval = fortran_list_match(test_value) + if tval and (len(tval) == 1) and (not tval[0]): + # Scalar + tval = list() + # end if + else: + tval = test_value + # end if + if isinstance(tval, list): + valid_val = tval + elif isinstance(tval, tuple): + valid_val = list(tval) + else: + valid_val = None + # end if + if (valid_val is not None) and (self._valid_values is not None): + # Special case for lists, _valid_values applies to elements + for item in valid_val: + if item not in self._valid_values: + valid_val = None + break + # end if + # end for + else: + pass + elif self.type is bool: + if isinstance(test_value, str): + if test_value.lower() in VariableProperty.__true_vals + VariableProperty.__false_vals: + valid_val = test_value.lower() in VariableProperty.__true_vals + else: + valid_val = None # i.e., pass + # end if + else: + valid_val = not not test_value # pylint: disable=unneeded-not + elif self.type is str: + if isinstance(test_value, str): + if self._valid_values is not None: + if test_value in self._valid_values: + valid_val = test_value + else: + valid_val = None # i.e., pass + else: + valid_val = test_value + # end if + # end if + # end if + # Call a check function? 
+ if valid_val and (self._check_fn is not None): + valid_val = self._check_fn(valid_val, prop_dict, error) + elif (valid_val is None) and error: + emsg = "Invalid {} variable property, '{}'" + raise CCPPError(emsg.format(self.name, test_value)) + # end if + return valid_val + +############################################################################## + +class VarCompatObj: + """Class to compare two Var objects and then answer questions about + the compatibility of the two variables. + There are three levels of compatibility. + * Compatible is when two variables match in all properties so that one + can be passed to another with no transformation. + * Conformable is when two variables have the same information but may + need some transformation between them. Examples are differences in + dimension ordering, units, or kind. + * Not Compatible is when information from one variable cannot be passed + to the other. + + Note that character(len=*) is considered equivalent to + character(len=<INTEGER_VALUE>) + + # Test that we can create a standard VarCompatObj object + >>> VarCompatObj("var_stdname", "real", "kind_phys", "m", [], \ + "var1_lname", "var_stdname", "real", "kind_phys", \ + "m", [], "var2_lname", _DOCTEST_RUNENV) #doctest: +ELLIPSIS + <__main__.VarCompatObj object at 0x...> + + # Test that a 2-D var with no horizontal transform works + >>> VarCompatObj("var_stdname", "real", "kind_phys", "m", \ + ['horizontal_dimension'], "var1_lname", "var_stdname", \ + "real", "kind_phys", "m", ['horizontal_dimension'], \ + "var2_lname", _DOCTEST_RUNENV) #doctest: +ELLIPSIS + <__main__.VarCompatObj object at 0x...> + + # Test that a 2-D var with a horizontal transform works + >>> VarCompatObj("var_stdname", "real", "kind_phys", "m", \ + ['horizontal_dimension'], "var1_lname", "var_stdname", \ + "real", "kind_phys", "m", ['horizontal_loop_extent'], \ + "var2_lname", _DOCTEST_RUNENV) #doctest: +ELLIPSIS + <__main__.VarCompatObj object at 0x...> + """ + + def 
__init__(self, var1_stdname, var1_type, var1_kind, var1_units, + var1_dims, var1_lname, var2_stdname, var2_type, var2_kind, + var2_units, var2_dims, var2_lname, run_env, v1_context=None, + v2_context=None): + """Initialize this object with information on the equivalence and/or + conformability of two variables. + variable 1 is described by <var1_stdname>, <var1_type>, <var1_kind>, + <var1_units>, <var1_dims>, <var1_lname>, and <v1_context>. + variable 2 is described by <var2_stdname>, <var2_type>, <var2_kind>, + <var2_units>, <var2_dims>, <var2_lname>, and <v2_context>. + <run_env> is the CCPPFrameworkEnv object used here to verify kind + equivalence or to produce kind transformations. + """ + self.__equiv = True # No transformation required + self.__compat = True # Callable with transformation + self.__stdname = var1_stdname + self.__v1_context = v1_context + self.__v2_context = v2_context + self.__v1_kind = var1_kind + self.__v2_kind = var2_kind + # Default (null) transform information + self.__dim_transforms = None + self.__kind_transforms = None + self.__unit_transforms = None + incompat_reason = list() + # First, check for fatal incompatibilities + if var1_stdname != var2_stdname: + self.__equiv = False + self.__compat = False + incompat_reason.append("standard names") + # end if + if var1_type != var2_type: + self.__equiv = False + self.__compat = False + incompat_reason.append("types") + # end if + # Check kind argument + if self.__compat: + if var1_type == 'character': + # First, make sure we have supported character 'kind' args: + v1_kind = self.char_kind_check(var1_kind) + if not v1_kind: + ctx = context_string(v1_context) + emsg = "Unsupported character kind/len argument, '{}', " + emsg += "in {}{}" + incompat_reason.append(emsg.format(var1_kind, + var1_lname, ctx)) + # end if + self.__v1_kind = None + v2_kind = self.char_kind_check(var2_kind) + if not v2_kind: + ctx = context_string(v2_context) + emsg = "Unsupported character kind/len argument, '{}', " 
+ emsg += "in {}{}" + incompat_reason.append(emsg.format(var2_kind, + var2_lname, ctx)) + # end if + self.__v2_kind = None + # Character types have to 'match' or the variables are + # incompatible + kind_eq = ((v1_kind and v2_kind) and + ((v1_kind == v2_kind) or + (((v1_kind == 'len=*') and + (v2_kind.startswith('len='))) or + (v1_kind.startswith('len=') and + (v2_kind == 'len=*'))))) + if not kind_eq: + self.__equiv = False + self.__compat = False + incompat_reason.append("character len arguments") + # end if + else: + if var1_kind != var2_kind: + self.__kind_transforms = self._get_kind_convstrs(var1_kind, + var2_kind, + run_env) + self.__equiv = self.__kind_transforms is None + # end if + # end if + # end if + if self.__compat: + # Check units argument + if var1_units != var2_units: + self.__equiv = False + # Try to find a set of unit conversions + self.__unit_transforms = self._get_unit_convstrs(var1_units, + var2_units) + # end if + # end if + if self.__compat: + # Check dimensions + ##XXgoldyXX: For now, we always have to create a dimension + ## transform because we do not know if the vertical + ## dimension is flipped. + if var1_dims or var2_dims: + _, vdim_ind = find_vertical_dimension(var1_dims) + if (var1_dims != var2_dims) or (vdim_ind >= 0): + self.__dim_transforms = self._get_dim_transforms(var1_dims, + var2_dims) + self.__compat = self.__dim_transforms is not None + # end if + # end if + if not self.__compat: + incompat_reason.append('dimensions') + # end if + # end if + self.__incompat_reason = " and ".join([x for x in incompat_reason if x]) + + def forward_transform(self, lvar_lname, rvar_lname, indices, + adjust_hdim=None, flip_vdim=None): + """Compute and return the the forward transform from "var1" to "var2". + <lvar_lname> is the local name of "var2". + <rvar_lname> is the local name of "var1". + <indices> is a tuple of the loop indices for "var1" (i.e., "var1" + will show up in the RHS of the transform as "var1(indices)". 
+ If <adjust_hdim> is not None, it should be a string containing the + local name of the "horizontal_loop_begin" variable. This is used to + compute the offset in the horizontal axis index between one and + "horizontal_loop_begin" (if any). This occurs when one of the + variables has extent "horizontal_loop_extent" and the other has + extent "horizontal_dimension". + If flip_vdim is not None, it should be a string containing the local + name of the vertical extent of the vertical axis for "var1" and + "var2" (i.e., "vertical_layer_dimension" or + "vertical_interface_dimension"). + """ + # Grab any dimension transform + if self.has_dim_transforms: + dtrans = self.__dim_transforms + lhs_term = dtrans.forward_transform(lvar_lname, indices, + adjust_hdim=adjust_hdim, + flip_vdim=flip_vdim) + else: + lhs_term = f"{lvar_lname}({','.join(indices)})" + # end if + rhs_term = f"{rvar_lname}({','.join(indices)})" + if self.has_kind_transforms: + kind = self.__kind_transforms[1] + rhs_term = f"real({rhs_term}, {kind})" + else: + kind = '' + # end if + if self.has_unit_transforms: + if kind: + kind = "_" + kind + elif self.__v2_kind: + kind = "_" + self.__v2_kind + # end if + rhs_term = self.__unit_transforms[0].format(var=rhs_term, kind=kind) + # end if + return f"{lhs_term} = {rhs_term}" + + def reverse_transform(self, lvar_lname, rvar_lname, indices, + adjust_hdim=None, flip_vdim=None): + """Compute and return the the reverse transform from "var2" to "var1". + <lvar_lname> is the local name of "var1". + <rvar_lname> is the local name of "var2". + <indices> is a tuple of the loop indices for "var2" (i.e., "var2" + will show up in the RHS of the transform as "var2(indices)". + If <adjust_hdim> is not None, it should be a string containing the + local name of the "horizontal_loop_begin" variable. This is used to + compute the offset in the horizontal axis index between one and + "horizontal_loop_begin" (if any). 
This occurs when one of the + variables has extent "horizontal_loop_extent" and the other has + extent "horizontal_dimension". + If flip_vdim is not None, it should be a string containing the local + name of the vertical extent of the vertical axis for "var1" and + "var2" (i.e., "vertical_layer_dimension" or + "vertical_interface_dimension"). + """ + # Grab any dimension transform + if self.has_dim_transforms: + dtrans = self.__dim_transforms + lhs_term = dtrans.reverse_transform(lvar_lname, indices, + adjust_hdim=adjust_hdim, + flip_vdim=flip_vdim) + else: + lhs_term = f"{lvar_lname}({','.join(indices)})" + # end if + rhs_term = f"{rvar_lname}({','.join(indices)})" + if self.has_kind_transforms: + kind = self.__kind_transforms[0] + rhs_term = f"real({rhs_term}, {kind})" + else: + kind = '' + # end if + if self.has_unit_transforms: + if kind: + kind = "_" + kind + elif self.__v1_kind: + kind = "_" + self.__v1_kind + # end if + rhs_term = self.__unit_transforms[1].format(var=rhs_term, kind=kind) + # end if + return f"{lhs_term} = {rhs_term}" + + def _get_kind_convstrs(self, var1_kind, var2_kind, run_env): + """Attempt to determine if no transformation is required (i.e., if + <var1_kind> and <var2_kind> will be the same at runtime. If so, + return None. + If a conversion is required, return a tuple with the two kinds, + i.e., (var1_kind, var2_kind). 
+ + # Try some kind conversions + >>> _DOCTEST_VCOMPAT._get_kind_convstrs('kind_phys', 'kind_dyn', \ + _DOCTEST_RUNENV) + ('kind_phys', 'kind_dyn') + >>> _DOCTEST_VCOMPAT._get_kind_convstrs('kind_phys', 'REAL32', \ + _DOCTEST_RUNENV) + ('kind_phys', 'REAL32') + + # Try some non-conversions + >>> _DOCTEST_VCOMPAT._get_kind_convstrs('kind_phys', 'kind_host', \ + _DOCTEST_RUNENV) + + >>> _DOCTEST_VCOMPAT._get_kind_convstrs('REAL64', 'kind_host', \ + _DOCTEST_RUNENV) + + """ + kind1 = run_env.kind_spec(var1_kind) + if kind1 is None: + kind1 = var1_kind + # end if + kind2 = run_env.kind_spec(var2_kind) + if kind2 is None: + kind2 = var2_kind + # end if + if kind1 != kind2: + return (var1_kind, var2_kind) + # end if + return None + + def _get_unit_convstrs(self, var1_units, var2_units): + """Attempt to retrieve the forward and reverse unit transformations + for transforming a variable in <var1_units> to / from a variable in + <var2_units>. + + # Try some working unit transforms + >>> _DOCTEST_VCOMPAT._get_unit_convstrs('m', 'mm') + ('1.0E+3{kind}*{var}', '1.0E-3{kind}*{var}') + >>> _DOCTEST_VCOMPAT._get_unit_convstrs('kg kg-1', 'g kg-1') + ('1.0E+3{kind}*{var}', '1.0E-3{kind}*{var}') + >>> _DOCTEST_VCOMPAT._get_unit_convstrs('C', 'K') + ('{var}+273.15{kind}', '{var}-273.15{kind}') + + # Try an unsupported conversion + >>> _DOCTEST_VCOMPAT._get_unit_convstrs('C', 'm') #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + parse_source.ParseSyntaxError: Unsupported unit conversion, 'C' to 'm' for 'var_stdname' + """ + u1_str = self.units_to_string(var1_units, self.__v1_context) + u2_str = self.units_to_string(var2_units, self.__v2_context) + unit_conv_str = "{0}__to__{1}".format(u1_str, u2_str) + try: + forward_transform = getattr(unit_conversion, unit_conv_str)() + except AttributeError: + emsg = "Unsupported unit conversion, '{}' to '{}' for '{}'" + raise ParseSyntaxError(emsg.format(var1_units, var2_units, + self.__stdname, + 
context=self.__v2_context)) + # end if + unit_conv_str = "{0}__to__{1}".format(u2_str, u1_str) + try: + reverse_transform = getattr(unit_conversion, unit_conv_str)() + except AttributeError: + emsg = "Unsupported unit conversion, '{}' to '{}' for '{}'" + raise ParseSyntaxError(emsg.format(var2_units, var1_units, + self.__stdname, + context=self.__v1_context)) + # end if + return (forward_transform, reverse_transform) + + def _get_dim_transforms(self, var1_dims, var2_dims): + """Attempt to find forward and reverse permutations for transforming a + variable with shape, <v1_dims>, to / from a variable with shape, + <v2_dims>. + Return the permutations, or None. + The forward dimension transformation is a permutation of the indices of + the first variable to the second. + The reverse dimension transformation is a permutation of the indices of + the second variable to the first. + + # Test simple permutations + >>> _DOCTEST_VCOMPAT._get_dim_transforms(['horizontal_dimension', \ + 'vertical_layer_dimension'], \ + ['vertical_layer_dimension', \ + 'horizontal_dimension']) \ + #doctest: +ELLIPSIS + <__main__.DimTransform object at 0x...> + >>> _DOCTEST_VCOMPAT._get_dim_transforms(['horizontal_dimension', \ + 'vertical_layer_dimension', \ + 'xdim'], \ + ['vertical_layer_dimension', \ + 'horizontal_dimension', \ + 'xdim']) #doctest: +ELLIPSIS + <__main__.DimTransform object at 0x...> + >>> _DOCTEST_VCOMPAT._get_dim_transforms(['horizontal_dimension', \ + 'vertical_layer_dimension', \ + 'xdim'], \ + ['xdim', \ + 'horizontal_dimension', \ + 'vertical_layer_dimension']) \ + #doctest: +ELLIPSIS + <__main__.DimTransform object at 0x...> + + # Test some mismatch sets + >>> _DOCTEST_VCOMPAT._get_dim_transforms(['horizontal_dimension', \ + 'vertical_layer_dimension', \ + 'xdim'], \ + ['horizontal_dimension', \ + 'vertical_layer_dimension']) \ + + >>> _DOCTEST_VCOMPAT._get_dim_transforms(['horizontal_dimension', \ + 'vertical_layer_dimension', \ + 'xdim'], \ + ['horizontal_dimension', 
\ + 'vertical_layer_dimension', \ + 'ydim']) + + """ + transforms = None + v1_dims = self.__regularize_dimensions(var1_dims) + v2_dims = self.__regularize_dimensions(var2_dims) + if v1_dims != v2_dims: + self.__equiv = False + # end if + # Is v2 a permutation of v1? + if len(v1_dims) == len(v2_dims): + v1_set = sorted(v1_dims) + v2_set = sorted(v2_dims) + if v1_set == v2_set: + forward_permutation = list() + reverse_permutation = [None] * len(v1_dims) + forward_hdim = '' + forward_hdim_index = -1 + forward_vdim_index = -1 + reverse_hdim = '' + reverse_hdim_index = -1 + reverse_vdim_index = -1 + for v2index, v2dim in enumerate(v2_dims): + for v1index, v1dim in enumerate(v1_dims): + if v1dim == v2dim: + # Add check for repeated indices + if v1index not in forward_permutation: + forward_permutation.append(v1index) + reverse_permutation[v1index] = v2index + if is_horizontal_dimension(var1_dims[v1index]): + forward_hdim = var1_dims[v1index] + forward_hdim_index = v1index + reverse_hdim = var2_dims[v2index] + reverse_hdim_index = v2index + elif is_vertical_dimension(var1_dims[v1index]): + forward_vdim_index = v1index + reverse_vdim_index = v2index + # end if + break + # end if + # end if (hope there is a repeated dimension) + # end for + # end for + if len(forward_permutation) != len(v1_dims): + emsg = "Bad dimension handling, '{}' and '{}'" + raise ParseInternalError(emsg.format(var1_dims, var2_dims)) + # end if + transforms = DimTransform(forward_permutation, + reverse_permutation, + forward_hdim, forward_hdim_index, + forward_vdim_index, + reverse_hdim, reverse_hdim_index, + reverse_vdim_index) + # end if + # end if + return transforms + + @staticmethod + def char_kind_check(kind_str): + """If <kind_str> is a supported character 'kind' argument, return its + standardized form, otherwise return False. 
+ """ + kind_ok = False + if isinstance(kind_str, str): + # Character allows both len and kind but we only support len + kentries = [x.strip() for x in kind_str.split(',') if x.strip()] + if len(kentries) == 1: + if kentries[0][0:4].lower() == 'len=': + kind_ok = True + # end if (no else, kind_ok already False) + # end if (no else, kind_ok already False) + # end if (no else, kind_ok already False) + return kind_ok + + @staticmethod + def units_to_string(units, context=None): + """Replace variable unit description with string that is a legal + Python identifier. + If the resulting string is a Python keyword, raise an exception.""" + # Replace each whitespace with an underscore + string = units.replace(" ","_") + # Replace each minus sign with '_minus_' + string = string.replace("-","_minus_") + # Replace each plus sign with '_plus_' + string = string.replace("+","_plus_") + # Test that the resulting string is a valid Python identifier + if not string.isidentifier(): + emsg = "Unsupported units entry, '{}'{}" + ctx = context_string(context) + raise ParseSyntaxError(emsg.format(units ,ctx)) + # end if + # Test that the resulting string is NOT a Python keyword + if keyword.iskeyword(string): + emsg = "Invalid units entry, '{}', Python identifier" + raise ParseSyntaxError(emsg.format(units), + context=context) + # end if + return string + + @staticmethod + def __regularize_dimensions(dims): + """Regularize <dims> by substituting a standin for any horizontal + dimension description (e.g., 'ccpp_constant_one:horizontal_loop_extent', + 'horizontal_loop_begin:horizontal_loop_end'). Also, regularize all + other dimensions by adding 'ccpp_constant_one' to any singleton + dimension. + Return the regularized dimensions. 
+ """ + new_dims = list() + for dim in dims: + if is_horizontal_dimension(dim): + new_dims.append(_HDIM_TEMPNAME) + elif ':' not in dim: + new_dims.append('ccpp_constant_one:' + dim) + else: + new_dims.append(dim) + # end if + # end for + return new_dims + + @property + def incompat_reason(self): + """Return the reason(s) the two variables are incompatible (or an + empty string)""" + return self.__incompat_reason + + @property + def equiv(self): + """Return True if this object describes two Var objects which are + equivalent (i.e., no transformation required to pass one to the other). + """ + return self.__equiv + + @property + def compat(self): + """Return True if this object describes two Var objects which are + compatible (i.e., the values from one can be transferred to the other + via the transformation(s) described in the object). + """ + return self.__compat + + @property + def has_dim_transforms(self): + """Return True if this object has dimension transformations. + The dimension transformations is a tuple for forward and reverse + transformation. + The forward dimension transformation is a permutation of the indices of + the first variable to the second. + The reverse dimension transformation is a permutation of the indices of + the second variable to the first. + """ + return self.__dim_transforms is not None + + @property + def has_kind_transforms(self): + """Return True if this object has the kind transformation. + The kind transformation is a tuple containing the forward and reverse + kind transformations. + The forward kind transformation is a string representation of the + kind of the second variable. + The reverse kind transformation is a string representation of the + kind of the first variable. + """ + return self.__kind_transforms is not None + + @property + def has_unit_transforms(self): + """Return True if this object has the unit transformations. + The unit transformations is a tuple with forward and reverse unit + transformations. 
+ The forward unit transformation is a string representation of the + equation to transform the first variable into the units of the second + The reverse unit transformation is a string representation of the + equation to transform the second variable into the units of the first + Each unit transform is a string which can be formatted with <kind> + and <var> arguments to produce code to transform one variable into + the correct units of the other. + """ + return self.__unit_transforms is not None + + def __bool__(self): + """Return True if this object describes two Var objects which are + equivalent (i.e., no transformation required to pass one to the other). + """ + return self.equiv + +############################################################################### +if __name__ == "__main__": + import doctest + from parse_tools import init_log, set_log_to_null + _DOCTEST_LOGGING = init_log('var_props') + set_log_to_null(_DOCTEST_LOGGING) + _DOCTEST_RUNENV = CCPPFrameworkEnv(_DOCTEST_LOGGING, + ndict={'host_files':'', + 'scheme_files':'', + 'suites':''}, + kind_types=["kind_phys=REAL64", + "kind_dyn=REAL32", + "kind_host=REAL64"]) + _DOCTEST_VCOMPAT = VarCompatObj("var_stdname", "real", "kind_phys", + "m", [], "var1_lname", "var_stdname", + "real", "kind_phys", "m", [], + "var2_lname", _DOCTEST_RUNENV) + doctest.testmod() diff --git a/src/ccpp_constituent_prop_mod.F90 b/src/ccpp_constituent_prop_mod.F90 index 2006b72c..419af297 100644 --- a/src/ccpp_constituent_prop_mod.F90 +++ b/src/ccpp_constituent_prop_mod.F90 @@ -18,7 +18,8 @@ module ccpp_constituent_prop_mod type, public, extends(ccpp_hashable_char_t) :: ccpp_constituent_properties_t ! A ccpp_constituent_properties_t object holds relevant metadata ! for a constituent species and provides interfaces to access that data. 
- character(len=:), private, allocatable :: std_name + character(len=:), private, allocatable :: var_std_name + character(len=:), private, allocatable :: var_long_name character(len=:), private, allocatable :: vert_dim integer, private :: const_ind = int_unassigned integer, private :: field_ind = int_unassigned @@ -29,6 +30,7 @@ module ccpp_constituent_prop_mod ! Informational methods procedure :: is_initialized => ccp_is_initialized procedure :: standard_name => ccp_get_standard_name + procedure :: long_name => ccp_get_long_name procedure :: is_layer_var => ccp_is_layer_var procedure :: is_interface_var => ccp_is_interface_var procedure :: is_2d_var => ccp_is_2d_var @@ -63,6 +65,8 @@ module ccpp_constituent_prop_mod real(kind_phys), allocatable :: vars_layer(:,:,:) real(kind_phys), allocatable :: vars_interface(:,:,:) real(kind_phys), allocatable :: vars_2d(:,:) + ! An array containing all the constituent metadata + ! XXgoldyXX: Is this needed? Source of duplicate metadata? type(ccpp_constituent_properties_t), allocatable :: const_metadata(:) contains ! Return .true. if a constituent matches pattern @@ -113,7 +117,8 @@ subroutine copyConstituent(outConst, inConst) class(ccpp_constituent_properties_t), intent(inout) :: outConst type(ccpp_constituent_properties_t), intent(in) :: inConst - outConst%std_name = inConst%std_name + outConst%var_std_name = inConst%var_std_name + outConst%var_long_name = inConst%var_long_name outConst%vert_dim = inConst%vert_dim outConst%const_ind = inConst%const_ind outConst%field_ind = inConst%field_ind @@ -122,18 +127,18 @@ end subroutine copyConstituent !####################################################################### - subroutine handle_allocate_error(astat, fieldname, errflg, errmsg) + subroutine handle_allocate_error(astat, fieldname, errcode, errmsg) ! Generate an error message if <astat> indicates an allocation failure ! 
Dummy arguments integer, intent(in) :: astat character(len=*), intent(in) :: fieldname - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg if (astat /= 0) then - if (present(errflg)) then - errflg = astat + if (present(errcode)) then + errcode = astat end if if (present(errmsg)) then write(errmsg, '(4a,i0)') 'Error allocating ', & @@ -141,8 +146,8 @@ subroutine handle_allocate_error(astat, fieldname, errflg, errmsg) trim(fieldname), ', error code = ', astat end if else - if (present(errflg)) then - errflg = 0 + if (present(errcode)) then + errcode = 0 end if if (present(errmsg)) then errmsg = '' @@ -154,44 +159,44 @@ end subroutine handle_allocate_error !####################################################################### function ccp_properties_get_key(hashable) - ! Return the constituent properties class key (std_name) + ! Return the constituent properties class key (var_std_name) ! Dummy arguments class(ccpp_constituent_properties_t), intent(in) :: hashable character(len=:), allocatable :: ccp_properties_get_key - ccp_properties_get_key = hashable%std_name + ccp_properties_get_key = hashable%var_std_name end function ccp_properties_get_key !####################################################################### - logical function ccp_is_initialized(this, errflg, errmsg) + logical function ccp_is_initialized(this, errcode, errmsg) ! Return .true. iff <this> is initialized - ! If <this> is *not* initialized and <errflg> and/or <errmsg> is present, + ! If <this> is *not* initialized and <errcode> and/or <errmsg> is present, ! fill these fields with an error status - ! If <this> *is* initialized and <errflg> and/or <errmsg> is present, + ! If <this> *is* initialized and <errcode> and/or <errmsg> is present, ! clear these fields. ! 
Dummy arguments class(ccpp_constituent_properties_t), intent(in) :: this - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg - ccp_is_initialized = allocated(this%std_name) + ccp_is_initialized = allocated(this%var_std_name) if (ccp_is_initialized) then - if (present(errflg)) then - errflg = 0 + if (present(errcode)) then + errcode = 0 end if if (present(errmsg)) then errmsg = '' end if else - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then - write(errmsg, *) 'ccpp_constituent_properties_t object ', & + write(errmsg, *) 'ccpp_constituent_properties_t object ', & 'is not initialized' end if end if @@ -200,40 +205,40 @@ end function ccp_is_initialized !####################################################################### - subroutine ccp_initialize(this, std_name, vertical_dim, advected, & - errflg, errmsg) + subroutine ccp_initialize(this, std_name, long_name, vertical_dim, & + advected, errcode, errmsg) ! Initialize all fields in <this> ! Dummy arguments class(ccpp_constituent_properties_t), intent(inout) :: this character(len=*), intent(in) :: std_name + character(len=*), intent(in) :: long_name character(len=*), intent(in) :: vertical_dim logical, optional, intent(in) :: advected - integer, intent(out) :: errflg + integer, intent(out) :: errcode character(len=*), intent(out) :: errmsg ! 
Local variable integer :: astat if (this%is_initialized()) then - errflg = 1 + errcode = 1 write(errmsg, *) 'ccpp_constituent_properties_t object, ', & - trim(std_name), ', is already initialized as ', this%std_name + trim(std_name), ', is already initialized as ', this%var_std_name else - errflg = 0 + errcode = 0 errmsg = '' - this%std_name = trim(std_name) + this%var_std_name = trim(std_name) end if - if (errflg == 0) then + if (errcode == 0) then + this%var_long_name = trim(long_name) this%vert_dim = trim(vertical_dim) - end if - if (errflg == 0) then if (present(advected)) then this%advected = advected else this%advected = .false. end if end if - if (errflg /= 0) then + if (errcode /= 0) then call this%deallocate() end if end subroutine ccp_initialize @@ -246,8 +251,11 @@ subroutine ccp_deallocate(this) ! Dummy argument class(ccpp_constituent_properties_t), intent(inout) :: this - if (allocated(this%std_name)) then - deallocate(this%std_name) + if (allocated(this%var_std_name)) then + deallocate(this%var_std_name) + end if + if (allocated(this%var_long_name)) then + deallocate(this%var_long_name) end if if (allocated(this%vert_dim)) then deallocate(this%vert_dim) @@ -259,32 +267,48 @@ end subroutine ccp_deallocate !####################################################################### - subroutine ccp_get_standard_name(this, std_name, errflg, errmsg) + subroutine ccp_get_standard_name(this, std_name, errcode, errmsg) ! Return this constituent's standard name ! 
Dummy arguments class(ccpp_constituent_properties_t), intent(in) :: this character(len=*), intent(out) :: std_name - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg - if (this%is_initialized(errflg, errmsg)) then - std_name = this%std_name + if (this%is_initialized(errcode, errmsg)) then + std_name = this%var_std_name end if end subroutine ccp_get_standard_name !####################################################################### - subroutine ccp_get_vertical_dimension(this, vert_dim, errflg, errmsg) + subroutine ccp_get_long_name(this, long_name, errcode, errmsg) + ! Return this constituent's long name (description) + + ! Dummy arguments + class(ccpp_constituent_properties_t), intent(in) :: this + character(len=*), intent(out) :: long_name + integer, optional, intent(out) :: errcode + character(len=*), optional, intent(out) :: errmsg + + if (this%is_initialized(errcode, errmsg)) then + long_name = this%var_long_name + end if + end subroutine ccp_get_long_name + + !####################################################################### + + subroutine ccp_get_vertical_dimension(this, vert_dim, errcode, errmsg) ! Return the standard name of this constituent's vertical dimension ! Dummy arguments class(ccpp_constituent_properties_t), intent(in) :: this character(len=*), intent(out) :: vert_dim - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg - if (this%is_initialized(errflg, errmsg)) then + if (this%is_initialized(errcode, errmsg)) then vert_dim = this%vert_dim end if end subroutine ccp_get_vertical_dimension @@ -336,30 +360,30 @@ end function ccp_is_2d_var !####################################################################### - integer function ccp_const_index(this, errflg, errmsg) + integer function ccp_const_index(this, errcode, errmsg) ! 
Return this constituent's master index (or -1 of not assigned) ! Dummy arguments class(ccpp_constituent_properties_t), intent(in) :: this - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg - if (this%is_initialized(errflg, errmsg)) then + if (this%is_initialized(errcode, errmsg)) then ccp_const_index = this%const_ind end if end function ccp_const_index !####################################################################### - integer function ccp_field_index(this, errflg, errmsg) + integer function ccp_field_index(this, errcode, errmsg) ! Return this constituent's field index (or -1 of not assigned) ! Dummy arguments class(ccpp_constituent_properties_t), intent(in) :: this - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg - if (this%is_initialized(errflg, errmsg)) then + if (this%is_initialized(errcode, errmsg)) then ccp_field_index = this%field_ind end if @@ -367,22 +391,22 @@ end function ccp_field_index !####################################################################### - subroutine ccp_set_const_index(this, index, errflg, errmsg) + subroutine ccp_set_const_index(this, index, errcode, errmsg) ! Set this constituent's index in the master constituent array ! It is an error to try to set an index if it is already set ! 
Dummy arguments class(ccpp_constituent_properties_t), intent(inout) :: this integer, intent(in) :: index - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg - if (this%is_initialized(errflg, errmsg)) then + if (this%is_initialized(errcode, errmsg)) then if (this%const_ind /= int_unassigned) then this%const_ind = index else - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then write(errmsg, *) 'ccpp_constituent_properties_t ', & @@ -395,22 +419,22 @@ end subroutine ccp_set_const_index !####################################################################### - subroutine ccp_set_field_index(this, findex, errflg, errmsg) + subroutine ccp_set_field_index(this, findex, errcode, errmsg) ! Set this constituent's field index ! It is an error to try to set an index if it is already set ! Dummy arguments class(ccpp_constituent_properties_t), intent(inout) :: this integer, intent(in) :: findex - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg - if (this%is_initialized(errflg, errmsg)) then + if (this%is_initialized(errcode, errmsg)) then if (this%field_ind == int_unassigned) then this%field_ind = findex else - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then write(errmsg, *) 'ccpp_constituent_properties_t ', & @@ -422,14 +446,14 @@ end subroutine ccp_set_field_index !####################################################################### - logical function ccp_is_advected(this, errflg, errmsg) + logical function ccp_is_advected(this, errcode, errmsg) ! 
Dummy arguments class(ccpp_constituent_properties_t), intent(in) :: this - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg - if (this%is_initialized(errflg, errmsg)) then + if (this%is_initialized(errcode, errmsg)) then ccp_is_advected = this%advected end if end function ccp_is_advected @@ -437,18 +461,19 @@ end function ccp_is_advected !####################################################################### logical function ccp_is_equivalent(this, oconst, & - errflg, errmsg) result(equiv) + errcode, errmsg) result(equiv) ! Dummy arguments class(ccpp_constituent_properties_t), intent(in) :: this type(ccpp_constituent_properties_t), intent(in) :: oconst - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg - if (this%is_initialized(errflg, errmsg) .and. & - oconst%is_initialized(errflg, errmsg)) then - equiv = (trim(this%std_name) == trim(oconst%std_name)) .and. & - (trim(this%vert_dim) == trim(oconst%vert_dim)) .and. & + if (this%is_initialized(errcode, errmsg) .and. & + oconst%is_initialized(errcode, errmsg)) then + equiv = (trim(this%var_std_name) == trim(oconst%var_std_name)) .and. & + (trim(this%var_long_name) == trim(oconst%var_long_name)) .and. & + (trim(this%vert_dim) == trim(oconst%vert_dim)) .and. & (this%advected .eqv. oconst%advected) else equiv = .false. @@ -462,20 +487,20 @@ end function ccp_is_equivalent ! !######################################################################## - logical function ccp_model_const_locked(this, errflg, errmsg, warn_func) + logical function ccp_model_const_locked(this, errcode, errmsg, warn_func) ! Return .true. iff <this> is locked (i.e., ready to use) - ! Optionally fill out <errflg> and <errmsg> if object not initialized + ! Optionally fill out <errcode> and <errmsg> if object not initialized ! 
Dummy arguments class(ccpp_model_constituents_t), intent(in) :: this - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg character(len=*), optional, intent(in) :: warn_func ! Local variable character(len=*), parameter :: subname = 'ccp_model_const_locked' - if (present(errflg)) then - errflg = 0 + if (present(errcode)) then + errcode = 0 end if if (present(errmsg)) then errmsg = '' @@ -487,13 +512,13 @@ logical function ccp_model_const_locked(this, errflg, errmsg, warn_func) if ( (.not. this%table_locked) .and. & present(errmsg) .and. present(warn_func)) then ! Write a warning as a courtesy to calling function but do not set - ! errflg (let caller decide). + ! errcode (let caller decide). write(errmsg, *) trim(warn_func), & ' WARNING: Model constituents not ready to use' end if else - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then if (present(warn_func)) then @@ -510,13 +535,14 @@ end function ccp_model_const_locked !######################################################################## - logical function ccp_model_const_okay_to_add(this, errflg, errmsg, warn_func) + logical function ccp_model_const_okay_to_add(this, errcode, errmsg, & + warn_func) ! Return .true. iff <this> is initialized and not locked - ! Optionally fill out <errflg> and <errmsg> if the conditions are not met. + ! Optionally fill out <errcode> and <errmsg> if the conditions are not met. ! Dummy arguments class(ccpp_model_constituents_t), intent(inout) :: this - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg character(len=*), optional, intent(in) :: warn_func ! 
Local variable @@ -524,11 +550,11 @@ logical function ccp_model_const_okay_to_add(this, errflg, errmsg, warn_func) ccp_model_const_okay_to_add = this%hash_table%is_initialized() if (ccp_model_const_okay_to_add) then - ccp_model_const_okay_to_add = .not. this%locked(errflg=errflg, & + ccp_model_const_okay_to_add = .not. this%locked(errcode=errcode, & errmsg=errmsg, warn_func=subname) if (.not. ccp_model_const_okay_to_add) then - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then if (present(warn_func)) then @@ -540,8 +566,8 @@ logical function ccp_model_const_okay_to_add(this, errflg, errmsg, warn_func) end if end if else - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then if (present(warn_func)) then @@ -557,25 +583,25 @@ end function ccp_model_const_okay_to_add !######################################################################## - subroutine ccp_model_const_add_metadata(this, field_data, errflg, errmsg) + subroutine ccp_model_const_add_metadata(this, field_data, errcode, errmsg) ! Add a constituent's metadata to the master hash table ! Dummy arguments class(ccpp_model_constituents_t), intent(inout) :: this type(ccpp_constituent_properties_t), target, intent(in) :: field_data - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg ! Local variables character(len=256) :: error character(len=*), parameter :: subnam = 'ccp_model_const_add_metadata' - if (this%okay_to_add(errflg=errflg, errmsg=errmsg, warn_func=subnam)) then + if (this%okay_to_add(errcode=errcode, errmsg=errmsg, warn_func=subnam)) then error = '' !!XXgoldyXX: Add check on key to see if incompatible item already there. 
call this%hash_table%add_hash_key(field_data, error) if (len_trim(error) > 0) then - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then errmsg = trim(error) @@ -589,12 +615,12 @@ subroutine ccp_model_const_add_metadata(this, field_data, errflg, errmsg) else if (field_data%is_2d_var()) then this%num_2d_vars = this%num_2d_vars + 1 else - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then call field_data%vertical_dimension(error, & - errflg=errflg, errmsg=errmsg) + errcode=errcode, errmsg=errmsg) if (len_trim(errmsg) == 0) then write(errmsg, *) "ERROR: Unknown vertical dimension, '", & trim(error), "'" @@ -603,8 +629,8 @@ subroutine ccp_model_const_add_metadata(this, field_data, errflg, errmsg) end if end if else - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then errmsg = 'ERROR: Model contituents are locked' @@ -652,7 +678,7 @@ end subroutine ccp_model_const_initialize !######################################################################## - function ccp_model_const_find_const(this, standard_name, errflg, errmsg) & + function ccp_model_const_find_const(this, standard_name, errcode, errmsg) & result(cprop) ! Return a constituent with key, <standard_name>, from the hash table ! <this> must be locked to execute this function @@ -662,7 +688,7 @@ function ccp_model_const_find_const(this, standard_name, errflg, errmsg) & ! Dummy arguments class(ccpp_model_constituents_t), intent(in) :: this character(len=*), intent(in) :: standard_name - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg type(ccpp_constituent_properties_t), pointer :: cprop ! 
Local variables @@ -673,8 +699,8 @@ function ccp_model_const_find_const(this, standard_name, errflg, errmsg) & nullify(cprop) hval => this%hash_table%table_value(standard_name, errmsg=error) if (len_trim(error) > 0) then - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then write(errmsg, *) subname, ': ', trim(error) @@ -684,8 +710,8 @@ function ccp_model_const_find_const(this, standard_name, errflg, errmsg) & type is (ccpp_constituent_properties_t) cprop => hval class default - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then write(errmsg, *) subname, ' ERROR: Bad hash table value', & @@ -699,7 +725,7 @@ end function ccp_model_const_find_const !######################################################################## subroutine ccp_model_const_lock(this, ncols, num_layers, num_interfaces, & - errflg, errmsg) + errcode, errmsg) ! Freeze hash table and initialize constituent field arrays ! Dummy arguments @@ -707,7 +733,7 @@ subroutine ccp_model_const_lock(this, ncols, num_layers, num_interfaces, & integer, intent(in) :: ncols integer, intent(in) :: num_layers integer, intent(in) :: num_interfaces - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg ! 
Local variables integer :: index_layer @@ -721,9 +747,9 @@ subroutine ccp_model_const_lock(this, ncols, num_layers, num_interfaces, & character(len=32) :: dimname character(len=*), parameter :: subname = 'ccp_model_const_lock' - if (this%locked(errflg=errflg, errmsg=errmsg, warn_func=subname)) then - if (present(errflg)) then - errflg = 1 + if (this%locked(errcode=errcode, errmsg=errmsg, warn_func=subname)) then + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then if (len_trim(errmsg) == 0) then @@ -752,7 +778,7 @@ subroutine ccp_model_const_lock(this, ncols, num_layers, num_interfaces, & ! Allocate the constituent array allocate(this%const_metadata(this%hash_table%num_values()), stat=astat) call handle_allocate_error(astat, 'const_metadata', & - errflg=errflg, errmsg=errmsg) + errcode=errcode, errmsg=errmsg) ! Iterate through the hash table to find entries if (astat == 0) then call hiter%initialize(this%hash_table) @@ -760,8 +786,8 @@ subroutine ccp_model_const_lock(this, ncols, num_layers, num_interfaces, & if (hiter%valid()) then index_const = index_const + 1 if (index_const > SIZE(this%const_metadata)) then - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then write(errmsg, *) subname, & @@ -774,27 +800,27 @@ subroutine ccp_model_const_lock(this, ncols, num_layers, num_interfaces, & type is (ccpp_constituent_properties_t) cprop => hval call cprop%set_const_index(index_const, & - errflg=errflg, errmsg=errmsg) + errcode=errcode, errmsg=errmsg) ! 
Figure out which type of variable this is if (cprop%is_layer_var()) then index_layer = index_layer + 1 call cprop%set_field_index(index_layer, & - errflg=errflg, errmsg=errmsg) + errcode=errcode, errmsg=errmsg) else if (cprop%is_interface_var()) then index_interface = index_interface + 1 call cprop%set_field_index(index_interface, & - errflg=errflg, errmsg=errmsg) + errcode=errcode, errmsg=errmsg) else if (cprop%is_2d_var()) then index_2d = index_2d + 1 call cprop%set_field_index(index_2d, & - errflg=errflg, errmsg=errmsg) + errcode=errcode, errmsg=errmsg) else - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then call cprop%vertical_dimension(dimname, & - errflg=errflg, errmsg=errmsg) + errcode=errcode, errmsg=errmsg) if (len_trim(errmsg) == 0) then write(errmsg, *) subname, & " ERROR: Bad vertical dimension, '", & @@ -804,8 +830,8 @@ subroutine ccp_model_const_lock(this, ncols, num_layers, num_interfaces, & end if this%const_metadata(index_const) = cprop class default - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then write(errmsg, *) subname, 'ERROR: Bad hash table value' @@ -819,16 +845,16 @@ subroutine ccp_model_const_lock(this, ncols, num_layers, num_interfaces, & end do ! 
Some size sanity checks if (index_const /= this%hash_table%num_values()) then - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then write(errmsg, *) subname, & " ERROR: Too few constituents found in hash table" end if else if (index_layer /= this%num_layer_vars) then - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then write(errmsg, '(2a,i0,a,i0)') subname, & @@ -836,8 +862,8 @@ subroutine ccp_model_const_lock(this, ncols, num_layers, num_interfaces, & index_layer, ") should be ", this%num_layer_vars end if else if (index_interface /= this%num_interface_vars) then - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then write(errmsg, '(2a,i0,a,i0)') subname, & @@ -845,8 +871,8 @@ subroutine ccp_model_const_lock(this, ncols, num_layers, num_interfaces, & index_interface, ") should be ", this%num_interface_vars end if else if (index_2d /= this%num_2d_vars) then - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then write(errmsg, '(2a,i0,a,i0)') subname, & @@ -858,27 +884,27 @@ subroutine ccp_model_const_lock(this, ncols, num_layers, num_interfaces, & allocate(this%vars_layer(ncols, num_layers, index_layer), & stat=astat) call handle_allocate_error(astat, 'vars_layer', & - errflg=errflg, errmsg=errmsg) + errcode=errcode, errmsg=errmsg) if (astat == 0) then this%num_layers = num_layers this%vars_layer = kphys_unassigned allocate(this%vars_interface(ncols, num_interfaces, & index_layer), stat=astat) call handle_allocate_error(astat, 'vars_interface', & - errflg=errflg, errmsg=errmsg) + errcode=errcode, errmsg=errmsg) end if if (astat == 0) then this%num_interfaces = num_interfaces this%vars_interface = kphys_unassigned allocate(this%vars_2d(ncols, index_2d), stat=astat) call handle_allocate_error(astat, 'vars_2d', & - 
errflg=errflg, errmsg=errmsg) + errcode=errcode, errmsg=errmsg) end if if (astat == 0) then this%vars_2d = kphys_unassigned end if - if (present(errflg)) then - if (errflg /= 0) then + if (present(errcode)) then + if (errcode /= 0) then astat = 1 end if end if @@ -942,7 +968,7 @@ end function ccp_model_const_is_match !######################################################################## integer function ccp_model_const_num_match(this, advected, & - errflg, errmsg) result(nmatch) + errcode, errmsg) result(nmatch) ! Query number of constituents matching pattern ! Each (optional) property which is present represents something ! which is required as part of a match. @@ -951,14 +977,14 @@ integer function ccp_model_const_num_match(this, advected, & ! Dummy arguments class(ccpp_model_constituents_t), intent(in) :: this logical, optional, intent(in) :: advected - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg ! Local variables integer :: index character(len=*), parameter :: subname = "ccp_model_const_num_match" nmatch = 0 - if (this%locked(errflg=errflg, errmsg=errmsg, warn_func=subname)) then + if (this%locked(errcode=errcode, errmsg=errmsg, warn_func=subname)) then do index = 1, SIZE(this%const_metadata) if (this%is_match(index, advected=advected)) then nmatch = nmatch + 1 @@ -971,7 +997,7 @@ end function ccp_model_const_num_match !######################################################################## subroutine ccp_model_const_copy_in_3d(this, const_array, advected, & - errflg, errmsg) + errcode, errmsg) ! Gather constituent fields matching pattern ! Each (optional) property which is present represents something ! which is required as part of a match. 
@@ -981,7 +1007,7 @@ subroutine ccp_model_const_copy_in_3d(this, const_array, advected, & class(ccpp_model_constituents_t), intent(in) :: this real(kind_phys), intent(out) :: const_array(:,:,:) logical, optional, intent(in) :: advected - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg ! Local variables integer :: index ! <this> const_metadata index @@ -992,7 +1018,7 @@ subroutine ccp_model_const_copy_in_3d(this, const_array, advected, & character(len=64) :: std_name character(len=*), parameter :: subname = "ccp_model_const_copy_in_3d" - if (this%locked(errflg=errflg, errmsg=errmsg, warn_func=subname)) then + if (this%locked(errcode=errcode, errmsg=errmsg, warn_func=subname)) then cindex = 0 max_cind = SIZE(const_array, 3) num_levels = SIZE(const_array, 2) @@ -1001,8 +1027,8 @@ subroutine ccp_model_const_copy_in_3d(this, const_array, advected, & ! See if we have room for another constituent cindex = cindex + 1 if (cindex > max_cind) then - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then write(errmsg, *) subname, & @@ -1013,8 +1039,8 @@ subroutine ccp_model_const_copy_in_3d(this, const_array, advected, & ! 
Copy this constituent's field data to <const_array> fld_ind = this%const_metadata(index)%field_index() if (fld_ind < 1) then - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then call this%const_metadata(index)%standard_name(std_name) @@ -1025,8 +1051,8 @@ subroutine ccp_model_const_copy_in_3d(this, const_array, advected, & if (this%num_layers == num_levels) then const_array(:,:,cindex) = this%vars_layer(:,:,fld_ind) else - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then call this%const_metadata(index)%standard_name(std_name) @@ -1041,8 +1067,8 @@ subroutine ccp_model_const_copy_in_3d(this, const_array, advected, & if (this%num_interfaces == num_levels) then const_array(:,:,cindex) = this%vars_interface(:,:,fld_ind) else - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then call this%const_metadata(index)%standard_name(std_name) @@ -1063,7 +1089,7 @@ end subroutine ccp_model_const_copy_in_3d !######################################################################## subroutine ccp_model_const_copy_out_3d(this, const_array, advected, & - errflg, errmsg) + errcode, errmsg) ! Update constituent fields matching pattern ! Each (optional) property which is present represents something ! which is required as part of a match. @@ -1073,7 +1099,7 @@ subroutine ccp_model_const_copy_out_3d(this, const_array, advected, & class(ccpp_model_constituents_t), intent(inout) :: this real(kind_phys), intent(in) :: const_array(:,:,:) logical, optional, intent(in) :: advected - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg ! Local variables integer :: index ! 
<this> const_metadata index @@ -1084,7 +1110,7 @@ subroutine ccp_model_const_copy_out_3d(this, const_array, advected, & character(len=64) :: std_name character(len=*), parameter :: subname = "ccp_model_const_copy_out_3d" - if (this%locked(errflg=errflg, errmsg=errmsg, warn_func=subname)) then + if (this%locked(errcode=errcode, errmsg=errmsg, warn_func=subname)) then cindex = 0 max_cind = SIZE(const_array, 3) num_levels = SIZE(const_array, 2) @@ -1093,8 +1119,8 @@ subroutine ccp_model_const_copy_out_3d(this, const_array, advected, & ! See if we have room for another constituent cindex = cindex + 1 if (cindex > max_cind) then - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then write(errmsg, *) subname, & @@ -1105,8 +1131,8 @@ subroutine ccp_model_const_copy_out_3d(this, const_array, advected, & ! Copy this field of to <const_array> to constituent's field data fld_ind = this%const_metadata(index)%field_index() if (fld_ind < 1) then - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then call this%const_metadata(index)%standard_name(std_name) @@ -1117,8 +1143,8 @@ subroutine ccp_model_const_copy_out_3d(this, const_array, advected, & if (this%num_layers == num_levels) then this%vars_layer(:,:,fld_ind) = const_array(:,:,cindex) else - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then call this%const_metadata(index)%standard_name(std_name) @@ -1133,8 +1159,8 @@ subroutine ccp_model_const_copy_out_3d(this, const_array, advected, & if (this%num_interfaces == num_levels) then this%vars_interface(:,:,fld_ind) = const_array(:,:,cindex) else - if (present(errflg)) then - errflg = 1 + if (present(errcode)) then + errcode = 1 end if if (present(errmsg)) then call this%const_metadata(index)%standard_name(std_name) @@ -1154,21 +1180,21 @@ end subroutine ccp_model_const_copy_out_3d 
!######################################################################## - integer function ccp_model_const_index(this, standard_name, errflg, errmsg) + integer function ccp_model_const_index(this, standard_name, errcode, errmsg) ! Return index of metadata matching <standard_name>. ! <this> must be locked to execute this function ! Dummy arguments class(ccpp_model_constituents_t), intent(in) :: this character(len=*), intent(in) :: standard_name - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg ! Local variables type(ccpp_constituent_properties_t), pointer :: cprop character(len=*), parameter :: subname = "ccp_model_const_index" - if (this%locked(errflg=errflg, errmsg=errmsg, warn_func=subname)) then - cprop => this%find_const(standard_name, errflg=errflg, errmsg=errmsg) + if (this%locked(errcode=errcode, errmsg=errmsg, warn_func=subname)) then + cprop => this%find_const(standard_name, errcode=errcode, errmsg=errmsg) if (associated(cprop)) then ccp_model_const_index = cprop%const_index() else @@ -1183,21 +1209,21 @@ end function ccp_model_const_index !######################################################################## integer function ccp_model_const_field_index(this, standard_name, & - errflg, errmsg) + errcode, errmsg) ! Return index of field matching <standard_name>. ! <this> must be locked to execute this function ! Dummy arguments class(ccpp_model_constituents_t), intent(in) :: this character(len=*), intent(in) :: standard_name - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg ! 
Local variables type(ccpp_constituent_properties_t), pointer :: cprop character(len=*), parameter :: subname = "ccp_model_field_index" - if (this%locked(errflg=errflg, errmsg=errmsg, warn_func=subname)) then - cprop => this%find_const(standard_name, errflg=errflg, errmsg=errmsg) + if (this%locked(errcode=errcode, errmsg=errmsg, warn_func=subname)) then + cprop => this%find_const(standard_name, errcode=errcode, errmsg=errmsg) if (associated(cprop)) then ccp_model_const_field_index = cprop%field_index() else @@ -1212,7 +1238,7 @@ end function ccp_model_const_field_index !######################################################################## subroutine ccp_model_const_metadata(this, standard_name, const_data, & - errflg, errmsg) + errcode, errmsg) ! Return metadata matching standard name ! <this> must be locked to execute this function @@ -1220,14 +1246,14 @@ subroutine ccp_model_const_metadata(this, standard_name, const_data, & class(ccpp_model_constituents_t), intent(in) :: this character(len=*), intent(in) :: standard_name type(ccpp_constituent_properties_t), intent(out) :: const_data - integer, optional, intent(out) :: errflg + integer, optional, intent(out) :: errcode character(len=*), optional, intent(out) :: errmsg ! 
Local variables type(ccpp_constituent_properties_t), pointer :: cprop character(len=*), parameter :: subname = "ccp_model_const_metadata" - if (this%locked(errflg=errflg, errmsg=errmsg, warn_func=subname)) then - cprop => this%find_const(standard_name, errflg=errflg, errmsg=errmsg) + if (this%locked(errcode=errcode, errmsg=errmsg, warn_func=subname)) then + cprop => this%find_const(standard_name, errcode=errcode, errmsg=errmsg) if (associated(cprop)) then const_data = cprop end if diff --git a/test/advection_test/cld_ice.meta b/test/advection_test/cld_ice.meta index 9dd77db2..eed4d90e 100644 --- a/test/advection_test/cld_ice.meta +++ b/test/advection_test/cld_ice.meta @@ -57,9 +57,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -90,9 +90,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/test/advection_test/cld_liq.meta b/test/advection_test/cld_liq.meta index 4e071091..90cd3b10 100644 --- a/test/advection_test/cld_liq.meta +++ b/test/advection_test/cld_liq.meta @@ -63,9 +63,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -102,9 +102,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/test/advection_test/run_test b/test/advection_test/run_test index 905cf1e4..d1cbed2b 100755 --- a/test/advection_test/run_test +++ 
b/test/advection_test/run_test @@ -128,7 +128,7 @@ process_list="" module_list="cld_ice,cld_liq" dependencies="" suite_list="cld_suite" -required_vars="ccpp_error_flag,ccpp_error_message" +required_vars="ccpp_error_code,ccpp_error_message" required_vars="${required_vars},cloud_ice_dry_mixing_ratio" required_vars="${required_vars},cloud_liquid_dry_mixing_ratio" required_vars="${required_vars},horizontal_loop_begin" @@ -144,7 +144,7 @@ input_vars="${input_vars},horizontal_loop_end" input_vars="${input_vars},surface_air_pressure,temperature" input_vars="${input_vars},time_step_for_physics,water_temperature_at_freezing" input_vars="${input_vars},water_vapor_specific_humidity" -output_vars="ccpp_error_flag,ccpp_error_message" +output_vars="ccpp_error_code,ccpp_error_message" output_vars="${output_vars},cloud_ice_dry_mixing_ratio" output_vars="${output_vars},cloud_liquid_dry_mixing_ratio" output_vars="${output_vars},temperature" diff --git a/test/advection_test/test_host.F90 b/test/advection_test/test_host.F90 index 3e9add58..61bd8657 100644 --- a/test/advection_test/test_host.F90 +++ b/test/advection_test/test_host.F90 @@ -11,6 +11,9 @@ module test_prog integer, public, parameter :: cs = 16 integer, public, parameter :: cm = 36 + !> \section arg_table_suite_info Argument Table + !! \htmlinclude arg_table_suite_info.html + !! 
type, public :: suite_info character(len=cs) :: suite_name = '' character(len=cs), pointer :: suite_parts(:) => NULL() @@ -426,7 +429,7 @@ program test 'water_vapor_specific_humidity ' /) character(len=cm), target :: test_outvars1(6) = (/ & 'ccpp_error_message ', & - 'ccpp_error_flag ', & + 'ccpp_error_code ', & 'temperature ', & 'water_vapor_specific_humidity ', & 'cloud_liquid_dry_mixing_ratio ', & @@ -440,7 +443,7 @@ program test 'water_temperature_at_freezing ', & 'water_vapor_specific_humidity ', & 'ccpp_error_message ', & - 'ccpp_error_flag ' /) + 'ccpp_error_code ' /) type(suite_info) :: test_suites(1) logical :: run_okay diff --git a/test/advection_test/test_host.meta b/test/advection_test/test_host.meta index d648baf7..b8aa21f8 100644 --- a/test/advection_test/test_host.meta +++ b/test/advection_test/test_host.meta @@ -1,3 +1,10 @@ +[ccpp-table-properties] + name = suite_info + type = ddt +[ccpp-arg-table] + name = suite_info + type = ddt + [ccpp-table-properties] name = test_host type = host @@ -24,8 +31,8 @@ type = character kind = len=512 [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer diff --git a/test/advection_test/test_host_data.meta b/test/advection_test/test_host_data.meta index 9e03d268..d256d2ec 100644 --- a/test/advection_test/test_host_data.meta +++ b/test/advection_test/test_host_data.meta @@ -21,7 +21,7 @@ state_variable = true type = real kind = kind_phys - units = kg/kg moist or dry air depending on type + units = kg kg-1 moist or dry air depending on type dimensions = (horizontal_dimension, vertical_layer_dimension, number_of_tracers) [ q(:,:,index_of_water_vapor_specific_humidity) ] standard_name = water_vapor_specific_humidity diff --git a/test/advection_test/test_reports.py b/test/advection_test/test_reports.py index a5a706cb..00e490a6 100644 --- a/test/advection_test/test_reports.py +++ 
b/test/advection_test/test_reports.py @@ -61,7 +61,7 @@ def usage(errmsg=None): _PROCESS_LIST = list() _MODULE_LIST = ["cld_ice", "cld_liq"] _SUITE_LIST = ["cld_suite"] -_REQUIRED_VARS_CLD = ["ccpp_error_flag", "ccpp_error_message", +_REQUIRED_VARS_CLD = ["ccpp_error_code", "ccpp_error_message", "horizontal_loop_begin", "horizontal_loop_end", "surface_air_pressure", "temperature", "time_step_for_physics", "water_temperature_at_freezing", @@ -74,7 +74,7 @@ def usage(errmsg=None): "water_vapor_specific_humidity", "cloud_ice_dry_mixing_ratio", "cloud_liquid_dry_mixing_ratio"] -_OUTPUT_VARS_CLD = ["ccpp_error_flag", "ccpp_error_message", +_OUTPUT_VARS_CLD = ["ccpp_error_code", "ccpp_error_message", "water_vapor_specific_humidity", "temperature", "cloud_ice_dry_mixing_ratio", "cloud_liquid_dry_mixing_ratio"] diff --git a/test/capgen_test/environ_conditions.meta b/test/capgen_test/environ_conditions.meta index 114f151f..9eb4e7b9 100644 --- a/test/capgen_test/environ_conditions.meta +++ b/test/capgen_test/environ_conditions.meta @@ -21,9 +21,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -72,9 +72,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -102,9 +102,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/test/capgen_test/make_ddt.meta b/test/capgen_test/make_ddt.meta index 2d1f766d..236c6e4e 100644 --- a/test/capgen_test/make_ddt.meta +++ b/test/capgen_test/make_ddt.meta @@ -61,9 +61,9 @@ kind = len=512 
intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -90,9 +90,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -119,9 +119,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/test/capgen_test/run_test b/test/capgen_test/run_test index c585092a..85f10282 100755 --- a/test/capgen_test/run_test +++ b/test/capgen_test/run_test @@ -132,7 +132,7 @@ process_list="setter=temp_set,adjusting=temp_calc_adjust" module_list="environ_conditions,make_ddt,temp_adjust,temp_calc_adjust,temp_set" dependencies="bar.F90,foo.F90,qux.F90" suite_list="ddt_suite;temp_suite" -required_vars_ddt="ccpp_error_flag,ccpp_error_message,horizontal_dimension" +required_vars_ddt="ccpp_error_code,ccpp_error_message,horizontal_dimension" required_vars_ddt="${required_vars_ddt},horizontal_loop_begin" required_vars_ddt="${required_vars_ddt},horizontal_loop_end" required_vars_ddt="${required_vars_ddt},model_times" @@ -143,9 +143,9 @@ input_vars_ddt="${input_vars_ddt},horizontal_loop_begin" input_vars_ddt="${input_vars_ddt},horizontal_loop_end" input_vars_ddt="${input_vars_ddt},model_times,number_of_model_times" input_vars_ddt="${input_vars_ddt},surface_air_pressure" -output_vars_ddt="ccpp_error_flag,ccpp_error_message" +output_vars_ddt="ccpp_error_code,ccpp_error_message" output_vars_ddt="${output_vars_ddt},model_times,number_of_model_times" -required_vars_temp="ccpp_error_flag,ccpp_error_message,horizontal_dimension" 
+required_vars_temp="ccpp_error_code,ccpp_error_message,horizontal_dimension" required_vars_temp="${required_vars_temp},horizontal_loop_begin" required_vars_temp="${required_vars_temp},horizontal_loop_end" required_vars_temp="${required_vars_temp},potential_temperature" @@ -164,7 +164,7 @@ input_vars_temp="${input_vars_temp},potential_temperature_increment" input_vars_temp="${input_vars_temp},surface_air_pressure,time_step_for_physics" input_vars_temp="${input_vars_temp},vertical_layer_dimension" input_vars_temp="${input_vars_temp},water_vapor_specific_humidity" -output_vars_temp="ccpp_error_flag,ccpp_error_message,potential_temperature" +output_vars_temp="ccpp_error_code,ccpp_error_message,potential_temperature" output_vars_temp="${output_vars_temp},potential_temperature_at_interface" output_vars_temp="${output_vars_temp},surface_air_pressure" output_vars_temp="${output_vars_temp},water_vapor_specific_humidity" diff --git a/test/capgen_test/temp_adjust.meta b/test/capgen_test/temp_adjust.meta index 14ad0051..1f8fda2e 100644 --- a/test/capgen_test/temp_adjust.meta +++ b/test/capgen_test/temp_adjust.meta @@ -58,9 +58,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -76,9 +76,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -94,9 +94,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/test/capgen_test/temp_calc_adjust.meta b/test/capgen_test/temp_calc_adjust.meta index 2ea400c5..fad025c5 100644 --- 
a/test/capgen_test/temp_calc_adjust.meta +++ b/test/capgen_test/temp_calc_adjust.meta @@ -44,9 +44,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -62,9 +62,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -80,9 +80,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/test/capgen_test/temp_set.meta b/test/capgen_test/temp_set.meta index 2b8d8827..6bcb3cc9 100644 --- a/test/capgen_test/temp_set.meta +++ b/test/capgen_test/temp_set.meta @@ -56,9 +56,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -100,9 +100,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -140,9 +140,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -159,9 +159,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out 
diff --git a/test/capgen_test/test_host.F90 b/test/capgen_test/test_host.F90 index 3681ce8b..55c7159e 100644 --- a/test/capgen_test/test_host.F90 +++ b/test/capgen_test/test_host.F90 @@ -11,6 +11,9 @@ module test_prog integer, public, parameter :: cs = 16 integer, public, parameter :: cm = 36 + !> \section arg_table_suite_info Argument Table + !! \htmlinclude arg_table_suite_info.html + !! type, public :: suite_info character(len=cs) :: suite_name = '' character(len=cs), pointer :: suite_parts(:) => NULL() @@ -366,7 +369,7 @@ program test 'potential_temperature_at_interface ', & 'surface_air_pressure ', & 'water_vapor_specific_humidity ', & - 'ccpp_error_flag ', & + 'ccpp_error_code ', & 'ccpp_error_message ' /) character(len=cm), target :: test_reqvars1(8) = (/ & 'potential_temperature ', & @@ -375,7 +378,7 @@ program test 'water_vapor_specific_humidity ', & 'potential_temperature_increment ', & 'time_step_for_physics ', & - 'ccpp_error_flag ', & + 'ccpp_error_code ', & 'ccpp_error_message ' /) character(len=cm), target :: test_invars2(3) = (/ & @@ -384,7 +387,7 @@ program test 'surface_air_pressure ' /) character(len=cm), target :: test_outvars2(4) = (/ & - 'ccpp_error_flag ', & + 'ccpp_error_code ', & 'ccpp_error_message ', & 'model_times ', & 'number_of_model_times ' /) @@ -393,7 +396,7 @@ program test 'model_times ', & 'number_of_model_times ', & 'surface_air_pressure ', & - 'ccpp_error_flag ', & + 'ccpp_error_code ', & 'ccpp_error_message ' /) type(suite_info) :: test_suites(2) logical :: run_okay diff --git a/test/capgen_test/test_host.meta b/test/capgen_test/test_host.meta index d648baf7..b8aa21f8 100644 --- a/test/capgen_test/test_host.meta +++ b/test/capgen_test/test_host.meta @@ -1,3 +1,10 @@ +[ccpp-table-properties] + name = suite_info + type = ddt +[ccpp-arg-table] + name = suite_info + type = ddt + [ccpp-table-properties] name = test_host type = host @@ -24,8 +31,8 @@ type = character kind = len=512 [ errflg ] - standard_name = ccpp_error_flag + 
standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer diff --git a/test/capgen_test/test_host_data.meta b/test/capgen_test/test_host_data.meta index 7185aed8..8a54a3ed 100644 --- a/test/capgen_test/test_host_data.meta +++ b/test/capgen_test/test_host_data.meta @@ -40,7 +40,7 @@ state_variable = true type = real kind = kind_phys - units = kg/kg moist or dry air depending on type + units = kg kg-1 moist or dry air depending on type dimensions = (horizontal_dimension, vertical_layer_dimension, number_of_tracers) [ q(:,:,index_of_water_vapor_specific_humidity) ] standard_name = water_vapor_specific_humidity diff --git a/test/capgen_test/test_reports.py b/test/capgen_test/test_reports.py index 43adbe38..45e7c14d 100644 --- a/test/capgen_test/test_reports.py +++ b/test/capgen_test/test_reports.py @@ -69,12 +69,12 @@ def usage(errmsg=None): _INPUT_VARS_DDT = ["model_times", "number_of_model_times", "horizontal_loop_begin", "horizontal_loop_end", "surface_air_pressure", "horizontal_dimension"] -_OUTPUT_VARS_DDT = ["ccpp_error_flag", "ccpp_error_message", "model_times", +_OUTPUT_VARS_DDT = ["ccpp_error_code", "ccpp_error_message", "model_times", "number_of_model_times"] _REQUIRED_VARS_DDT = _INPUT_VARS_DDT + _OUTPUT_VARS_DDT _PROT_VARS_TEMP = ["horizontal_loop_begin", "horizontal_loop_end", "horizontal_dimension", "vertical_layer_dimension"] -_REQUIRED_VARS_TEMP = ["ccpp_error_flag", "ccpp_error_message", +_REQUIRED_VARS_TEMP = ["ccpp_error_code", "ccpp_error_message", "potential_temperature", "potential_temperature_at_interface", "potential_temperature_increment", @@ -85,7 +85,7 @@ def usage(errmsg=None): "potential_temperature_increment", "surface_air_pressure", "time_step_for_physics", "water_vapor_specific_humidity"] -_OUTPUT_VARS_TEMP = ["ccpp_error_flag", "ccpp_error_message", +_OUTPUT_VARS_TEMP = ["ccpp_error_code", "ccpp_error_message", "potential_temperature", 
"potential_temperature_at_interface", "surface_air_pressure", "water_vapor_specific_humidity"] diff --git a/test/run_tests.sh b/test/run_tests.sh new file mode 100755 index 00000000..83b89c73 --- /dev/null +++ b/test/run_tests.sh @@ -0,0 +1,40 @@ +#! /bin/bash + +root=$( dirname $( cd $( dirname ${0}); pwd -P ) ) +test_dir=${root}/test + +perr() { + # Print error message ($2) on error ($1) + if [ ${1} -ne 0 ]; then + echo "ERROR: ${2}" + if [ $# -gt 2 ]; then + exit ${3} + else + exit 1 + fi + fi +} + + +cd ${test_dir} +perr $? "Cannot cd to test directory, '${test_dir}'" + +# Run capgen test +./capgen_test/run_test +perr $? "Failure running capgen test" + +# Run advection test +./advection_test/run_test +perr $? "Failure running advection test" + +# Run doctests +./run_doctest.sh +perr $? "Failure running doctests" + +for test in `ls unit_tests/test_*.py`; do + echo "Running unit test, ${test}" + python3 ${test} + perr $? "Failure running unit test, ${test}" +done + +echo "All tests PASSed!" 
diff --git a/test/unit_tests/sample_files/test_host.meta b/test/unit_tests/sample_files/test_host.meta index b0a7d603..45df6537 100644 --- a/test/unit_tests/sample_files/test_host.meta +++ b/test/unit_tests/sample_files/test_host.meta @@ -27,8 +27,8 @@ type = character kind = len=512 [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer diff --git a/test/unit_tests/sample_files/test_multi_ccpp_arg_tables.meta b/test/unit_tests/sample_files/test_multi_ccpp_arg_tables.meta index 7d81172e..03733d4d 100644 --- a/test/unit_tests/sample_files/test_multi_ccpp_arg_tables.meta +++ b/test/unit_tests/sample_files/test_multi_ccpp_arg_tables.meta @@ -58,9 +58,9 @@ units = 1 | dimensions = () | type = character | kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -88,9 +88,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -107,9 +107,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/test/unit_tests/sample_scheme_files/CCPPeq1_var_in_fort_meta.meta b/test/unit_tests/sample_scheme_files/CCPPeq1_var_in_fort_meta.meta index 23220216..835f2cb3 100644 --- a/test/unit_tests/sample_scheme_files/CCPPeq1_var_in_fort_meta.meta +++ b/test/unit_tests/sample_scheme_files/CCPPeq1_var_in_fort_meta.meta @@ -29,9 +29,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = 
ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_fort.meta b/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_fort.meta index 157117ce..ac0487b3 100644 --- a/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_fort.meta +++ b/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_fort.meta @@ -29,9 +29,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_meta.meta b/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_meta.meta index e416cf3c..b3d1e4ec 100644 --- a/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_meta.meta +++ b/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_meta.meta @@ -21,9 +21,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/test/unit_tests/sample_scheme_files/CCPPgt1_var_in_fort_meta.meta b/test/unit_tests/sample_scheme_files/CCPPgt1_var_in_fort_meta.meta index c6da9e1c..064bf551 100644 --- a/test/unit_tests/sample_scheme_files/CCPPgt1_var_in_fort_meta.meta +++ b/test/unit_tests/sample_scheme_files/CCPPgt1_var_in_fort_meta.meta @@ -29,9 +29,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/test/unit_tests/sample_scheme_files/CCPPnotset_var_missing_in_meta.meta 
b/test/unit_tests/sample_scheme_files/CCPPnotset_var_missing_in_meta.meta index 4fb2c868..8790439e 100644 --- a/test/unit_tests/sample_scheme_files/CCPPnotset_var_missing_in_meta.meta +++ b/test/unit_tests/sample_scheme_files/CCPPnotset_var_missing_in_meta.meta @@ -21,9 +21,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/test/unit_tests/sample_scheme_files/invalid_dummy_arg.meta b/test/unit_tests/sample_scheme_files/invalid_dummy_arg.meta index d3364ce7..15606af0 100644 --- a/test/unit_tests/sample_scheme_files/invalid_dummy_arg.meta +++ b/test/unit_tests/sample_scheme_files/invalid_dummy_arg.meta @@ -58,9 +58,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/test/unit_tests/sample_scheme_files/invalid_subr_stmnt.meta b/test/unit_tests/sample_scheme_files/invalid_subr_stmnt.meta index 099c7686..f4c9a3b3 100644 --- a/test/unit_tests/sample_scheme_files/invalid_subr_stmnt.meta +++ b/test/unit_tests/sample_scheme_files/invalid_subr_stmnt.meta @@ -15,9 +15,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/test/unit_tests/sample_scheme_files/mismatch_intent.meta b/test/unit_tests/sample_scheme_files/mismatch_intent.meta index 64b8733a..ff80c5d8 100644 --- a/test/unit_tests/sample_scheme_files/mismatch_intent.meta +++ b/test/unit_tests/sample_scheme_files/mismatch_intent.meta @@ -58,9 +58,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = 
ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -76,9 +76,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -94,9 +94,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/test/unit_tests/sample_scheme_files/missing_arg_table.meta b/test/unit_tests/sample_scheme_files/missing_arg_table.meta index a221dcbd..7879a65b 100644 --- a/test/unit_tests/sample_scheme_files/missing_arg_table.meta +++ b/test/unit_tests/sample_scheme_files/missing_arg_table.meta @@ -15,9 +15,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -33,9 +33,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/test/unit_tests/sample_scheme_files/missing_fort_header.meta b/test/unit_tests/sample_scheme_files/missing_fort_header.meta index 4767dc63..89a3cf3d 100644 --- a/test/unit_tests/sample_scheme_files/missing_fort_header.meta +++ b/test/unit_tests/sample_scheme_files/missing_fort_header.meta @@ -58,9 +58,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -76,9 +76,9 @@ kind = len=512 
intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -94,9 +94,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/test/unit_tests/sample_scheme_files/reorder.meta b/test/unit_tests/sample_scheme_files/reorder.meta index 1a64ebbf..cc094e53 100644 --- a/test/unit_tests/sample_scheme_files/reorder.meta +++ b/test/unit_tests/sample_scheme_files/reorder.meta @@ -15,9 +15,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -76,9 +76,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -94,9 +94,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/test/unit_tests/sample_scheme_files/temp_adjust.meta b/test/unit_tests/sample_scheme_files/temp_adjust.meta index 9ff7d48b..e88108f6 100644 --- a/test/unit_tests/sample_scheme_files/temp_adjust.meta +++ b/test/unit_tests/sample_scheme_files/temp_adjust.meta @@ -58,9 +58,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -76,9 +76,9 @@ kind = 
len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out @@ -94,9 +94,9 @@ kind = len=512 intent = out [ errflg ] - standard_name = ccpp_error_flag + standard_name = ccpp_error_code long_name = Error flag for error handling in CCPP - units = flag + units = 1 dimensions = () type = integer intent = out diff --git a/test/unit_tests/test_metadata_scheme_file.py b/test/unit_tests/test_metadata_scheme_file.py index 0571da37..52f62aa1 100644 --- a/test/unit_tests/test_metadata_scheme_file.py +++ b/test/unit_tests/test_metadata_scheme_file.py @@ -1,4 +1,4 @@ -#! /usr/bin/env python +#! /usr/bin/env python3 """ ----------------------------------------------------------------------- @@ -58,6 +58,7 @@ # pylint: disable=wrong-import-position from ccpp_capgen import parse_scheme_files +from framework_env import CCPPFrameworkEnv # pylint: enable=wrong-import-position class MetadataHeaderTestCase(unittest.TestCase): @@ -66,16 +67,31 @@ class MetadataHeaderTestCase(unittest.TestCase): def setUp(self): """Setup important directories and logging""" self._sample_files_dir = os.path.join(_TEST_DIR, "sample_scheme_files") - self._logger = logging.getLogger(self.__class__.__name__) + logger = logging.getLogger(self.__class__.__name__) + self._run_env = CCPPFrameworkEnv(logger, ndict={'host_files':'', + 'scheme_files':'', + 'suites':''}) + self._run_env_ccpp = CCPPFrameworkEnv(logger, + ndict={'host_files':'', + 'scheme_files':'', + 'suites':'', + 'preproc_directives': + 'CCPP=1'}) + self._run_env_ccpp2 = CCPPFrameworkEnv(logger, + ndict={'host_files':'', + 'scheme_files':'', + 'suites':'', + 'preproc_directives': + 'CCPP=2'}) def test_good_scheme_file(self): """Test that good metadata file matches the Fortran, with routines in the same order """ #Setup - scheme_files = [os.path.join(self._sample_files_dir, 
"temp_adjust.meta")] - preproc_defs = {} + scheme_files = [os.path.join(self._sample_files_dir, + "temp_adjust.meta")] #Exercise - scheme_headers, table_dict = parse_scheme_files(scheme_files, preproc_defs, - self._logger) + scheme_headers, table_dict = parse_scheme_files(scheme_files, + self._run_env) #Verify size of returned list equals number of scheme headers in the test file # and that header (subroutine) names are 'temp_adjust_[init,run,finalize]' self.assertEqual(len(scheme_headers), 3) @@ -92,10 +108,9 @@ def test_reordered_scheme_file(self): """Test that metadata file matches the Fortran when the routines are not in the same order """ #Setup scheme_files = [os.path.join(self._sample_files_dir, "reorder.meta")] - preproc_defs = {} #Exercise - scheme_headers, table_dict = parse_scheme_files(scheme_files, preproc_defs, - self._logger) + scheme_headers, table_dict = parse_scheme_files(scheme_files, + self._run_env) #Verify size of returned list equals number of scheme headers in the test file # and that header (subroutine) names are 'reorder_[init,run,finalize]' self.assertEqual(len(scheme_headers), 3) @@ -112,10 +127,9 @@ def test_missing_metadata_header(self): """Test that a missing metadata header (aka arg table) is corretly detected """ #Setup scheme_files = [os.path.join(self._sample_files_dir, "missing_arg_table.meta")] - preproc_defs = {} #Exercise with self.assertRaises(Exception) as context: - parse_scheme_files(scheme_files, preproc_defs, self._logger) + parse_scheme_files(scheme_files, self._run_env) #Verify correct error message returned emsg = "No matching metadata header found for missing_arg_table_run in" self.assertTrue(emsg in str(context.exception)) @@ -124,10 +138,9 @@ def test_missing_fortran_header(self): """Test that a missing fortran header is corretly detected """ #Setup scheme_files = [os.path.join(self._sample_files_dir, "missing_fort_header.meta")] - preproc_defs = {} #Exercise with self.assertRaises(Exception) as context: - 
parse_scheme_files(scheme_files, preproc_defs, self._logger) + parse_scheme_files(scheme_files, self._run_env) #Verify correct error message returned emsg = "No matching Fortran routine found for missing_fort_header_run in" self.assertTrue(emsg in str(context.exception)) @@ -136,10 +149,9 @@ def test_mismatch_intent(self): """Test that differing intent, kind, rank, and type between metadata and fortran is corretly detected """ #Setup scheme_files = [os.path.join(self._sample_files_dir, "mismatch_intent.meta")] - preproc_defs = {} #Exercise with self.assertRaises(Exception) as context: - parse_scheme_files(scheme_files, preproc_defs, self._logger) + parse_scheme_files(scheme_files, self._run_env) #Verify 4 correct error messages returned self.assertTrue('intent mismatch (in != inout) in mismatch_intent_run, at' in str(context.exception)) self.assertTrue('kind mismatch (kind_fizz != kind_phys) in mismatch_intent_run, at' in str(context.exception)) @@ -151,10 +163,9 @@ def test_invalid_subr_stmnt(self): """Test that invalid Fortran subroutine statements are correctly detected """ #Setup scheme_files = [os.path.join(self._sample_files_dir, "invalid_subr_stmnt.meta")] - preproc_defs = {} #Exercise with self.assertRaises(Exception) as context: - parse_scheme_files(scheme_files, preproc_defs, self._logger) + parse_scheme_files(scheme_files, self._run_env) #Verify correct error message returned self.assertTrue("Invalid dummy argument, 'errmsg', at" in str(context.exception)) @@ -162,10 +173,9 @@ def test_invalid_dummy_arg(self): """Test that invalid dummy argument statements are correctly detected """ #Setup scheme_files = [os.path.join(self._sample_files_dir, "invalid_dummy_arg.meta")] - preproc_defs = {} #Exercise with self.assertRaises(Exception) as context: - parse_scheme_files(scheme_files, preproc_defs, self._logger) + parse_scheme_files(scheme_files, self._run_env) #Verify correct error message returned self.assertTrue("Invalid dummy argument, 'woohoo', at" in 
str(context.exception)) @@ -175,10 +185,9 @@ def test_CCPPnotset_var_missing_in_meta(self): (due to an undefined pre-processor directive: #ifndef CCPP), BUT IS NOT PRESENT in meta file""" #Setup scheme_files = [os.path.join(self._sample_files_dir, "CCPPnotset_var_missing_in_meta.meta")] - preproc_defs = {} # CCPP directive is not set #Exercise with self.assertRaises(Exception) as context: - parse_scheme_files(scheme_files, preproc_defs, self._logger) + parse_scheme_files(scheme_files, self._run_env) #Verify 3 correct error messages returned self.assertTrue('Variable mismatch in CCPPnotset_var_missing_in_meta_run, variables missing from metadata header.' in str(context.exception)) @@ -191,10 +200,9 @@ def test_CCPPeq1_var_missing_in_fort(self): (due to a pre-processor directive: #ifndef CCPP), but IS PRESENT in meta file""" #Setup scheme_files = [os.path.join(self._sample_files_dir, "CCPPeq1_var_missing_in_fort.meta")] - preproc_defs = {'CCPP':1} # Set CCPP directive #Exercise with self.assertRaises(Exception) as context: - parse_scheme_files(scheme_files, preproc_defs, self._logger) + parse_scheme_files(scheme_files, self._run_env_ccpp) #Verify 3 correct error messages returned self.assertTrue('Variable mismatch in CCPPeq1_var_missing_in_fort_run, variables missing from Fortran scheme.' 
in str(context.exception)) @@ -208,9 +216,9 @@ def test_CCPPeq1_var_in_fort_meta(self): (due to a pre-processor directive: #ifdef CCPP), and IS PRESENT in meta file""" #Setup scheme_files = [os.path.join(self._sample_files_dir, "CCPPeq1_var_in_fort_meta.meta")] - preproc_defs = {'CCPP':1} # Set CCPP directive #Exercise - scheme_headers, table_dict = parse_scheme_files(scheme_files, preproc_defs, self._logger) + scheme_headers, table_dict = parse_scheme_files(scheme_files, + self._run_env_ccpp) #Verify size of returned list equals number of scheme headers in the test file (1) # and that header (subroutine) name is 'CCPPeq1_var_in_fort_meta_run' self.assertEqual(len(scheme_headers), 1) @@ -227,9 +235,10 @@ def test_CCPPgt1_var_in_fort_meta(self): (due to a pre-processor directive: #if CCPP > 1), and IS PRESENT in meta file""" #Setup scheme_files = [os.path.join(self._sample_files_dir, "CCPPgt1_var_in_fort_meta.meta")] - preproc_defs = {'CCPP':2} # Set CCPP directive to > 1 #Exercise - scheme_headers, table_dict = parse_scheme_files(scheme_files, preproc_defs, self._logger) + # Set CCPP directive to > 1 + scheme_headers, table_dict = parse_scheme_files(scheme_files, + self._run_env_ccpp2) #Verify size of returned list equals number of scheme headers in the test file (1) # and that header (subroutine) name is 'CCPPgt1_var_in_fort_meta_init' self.assertEqual(len(scheme_headers), 1) @@ -246,10 +255,9 @@ def test_CCPPgt1_var_in_fort_meta2(self): (due to a pre-processor directive: #if CCPP > 1), but IS PRESENT in meta file""" #Setup scheme_files = [os.path.join(self._sample_files_dir, "CCPPgt1_var_in_fort_meta.meta")] - preproc_defs = {'CCPP':1} # Set CCPP directive to 1 #Exercise with self.assertRaises(Exception) as context: - parse_scheme_files(scheme_files, preproc_defs, self._logger) + parse_scheme_files(scheme_files, self._run_env_ccpp) #Verify 3 correct error messages returned self.assertTrue('Variable mismatch in CCPPgt1_var_in_fort_meta_init, variables missing from 
Fortran scheme.' in str(context.exception)) @@ -263,10 +271,9 @@ def test_CCPPeq1_var_missing_in_meta(self): (due to a pre-processor directive: #ifdef CCPP), and IS NOT PRESENT in meta file""" #Setup scheme_files = [os.path.join(self._sample_files_dir, "CCPPeq1_var_missing_in_meta.meta")] - preproc_defs = {'CCPP':1} # Set CCPP directive #Exercise with self.assertRaises(Exception) as context: - parse_scheme_files(scheme_files, preproc_defs, self._logger) + parse_scheme_files(scheme_files, self._run_env_ccpp) #Verify 3 correct error messages returned self.assertTrue('Variable mismatch in CCPPeq1_var_missing_in_meta_finalize, variables missing from metadata header.' in str(context.exception)) diff --git a/test/unit_tests/test_metadata_table.py b/test/unit_tests/test_metadata_table.py index a8014f52..ce314e8b 100644 --- a/test/unit_tests/test_metadata_table.py +++ b/test/unit_tests/test_metadata_table.py @@ -1,4 +1,4 @@ -#! /usr/bin/env python +#! /usr/bin/env python3 """ ----------------------------------------------------------------------- Description: Contains unit tests for parse_metadata_file @@ -26,20 +26,24 @@ # pylint: disable=wrong-import-position from metadata_table import parse_metadata_file, MetadataTable +from framework_env import CCPPFrameworkEnv # pylint: enable=wrong-import-position class MetadataTableTestCase(unittest.TestCase): """Tests for `parse_metadata_file`.""" + _DUMMY_RUN_ENV = CCPPFrameworkEnv(None, ndict={'host_files':'', + 'scheme_files':'', + 'suites':''}) + def test_good_host_file(self): """Test that good host file test_host.meta returns one header named test_host""" #Setup known_ddts = list() - logger = None filename = os.path.join(SAMPLE_FILES_DIR, "test_host.meta") #Exercise - result = parse_metadata_file(filename, known_ddts, logger) + result = parse_metadata_file(filename, known_ddts, self._DUMMY_RUN_ENV) #Verify that: # no dependencies is returned as '' # rel_path is returned as None @@ -57,10 +61,9 @@ def test_good_host_file(self): 
def test_good_multi_ccpp_arg_table(self): """Test that good file with 4 ccpp-arg-table returns 4 headers""" known_ddts = list() - logger = None filename = os.path.join(SAMPLE_FILES_DIR, "test_multi_ccpp_arg_tables.meta") #Exercise - result = parse_metadata_file(filename, known_ddts, logger) + result = parse_metadata_file(filename, known_ddts, self._DUMMY_RUN_ENV) #Verify that size of returned list equals number of ccpp-table-properties in the test file # ccpp-arg-tables are returned in result[0].sections() and result[1].sections() self.assertEqual(len(result), 2) @@ -78,12 +81,12 @@ def test_bad_type_name(self): """Test that `type = banana` returns expected error""" #Setup known_ddts = list() - logger = None filename = os.path.join(SAMPLE_FILES_DIR, "test_bad_type_name.meta") #Exercise with self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, logger) + tables = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) #Verify #print("The exception is", context.exception) @@ -92,11 +95,11 @@ def test_bad_type_name(self): def test_double_header(self): """Test that a duplicate header returns expected error""" known_ddts = list() - logger = None filename = os.path.join(SAMPLE_FILES_DIR, "double_header.meta") with self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, logger) + tables = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) #print("The exception is", context.exception) self.assertTrue('table already contains \'test_host\'' in str(context.exception)) @@ -104,11 +107,11 @@ def test_double_header(self): def test_bad_dimension(self): """Test that `dimension = banana` returns expected error""" known_ddts = list() - logger = None filename = os.path.join(SAMPLE_FILES_DIR, "test_bad_dimension.meta") with self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, logger) + tables = parse_metadata_file(filename, known_ddts, + 
self._DUMMY_RUN_ENV) #print("The exception is", context.exception) self.assertTrue('Invalid \'dimensions\' property value, \'' in str(context.exception)) @@ -116,11 +119,12 @@ def test_bad_dimension(self): def test_duplicate_variable(self): """Test that a duplicate variable returns expected error""" known_ddts = list() - logger = None - filename = os.path.join(SAMPLE_FILES_DIR, "test_duplicate_variable.meta") + filename = os.path.join(SAMPLE_FILES_DIR, + "test_duplicate_variable.meta") with self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, logger) + tables = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) #print("The exception is", context.exception) self.assertTrue('Invalid (duplicate) standard name in temp_calc_adjust_run, defined at ' in str(context.exception)) @@ -128,11 +132,11 @@ def test_duplicate_variable(self): def test_invalid_intent(self): """Test that an invalid intent returns expected error""" known_ddts = list() - logger = None filename = os.path.join(SAMPLE_FILES_DIR, "test_invalid_intent.meta") with self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, logger) + tables = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) #print("The exception is", context.exception) self.assertTrue('Invalid \'intent\' property value, \'banana\', at ' in str(context.exception)) @@ -140,11 +144,11 @@ def test_invalid_intent(self): def test_missing_intent(self): """Test that a missing intent returns expected error""" known_ddts = list() - logger = None filename = os.path.join(SAMPLE_FILES_DIR, "test_missing_intent.meta") with self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, logger) + tables = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) #print("The exception is", context.exception) emsg = "Required property, 'intent', missing, at " @@ -153,11 +157,11 @@ def test_missing_intent(self): def 
test_missing_units(self): """Test that a missing units attribute returns expected error""" known_ddts = list() - logger = None filename = os.path.join(SAMPLE_FILES_DIR, "test_missing_units.meta") with self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, logger) + tables = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) #print("The exception is", context.exception) emsg = "Required property, 'units', missing, at" @@ -166,11 +170,12 @@ def test_missing_units(self): def test_missing_table_type(self): """Test that a missing table type returns expected error""" known_ddts = list() - logger = None - filename = os.path.join(SAMPLE_FILES_DIR, "test_missing_table_type.meta") + filename = os.path.join(SAMPLE_FILES_DIR, + "test_missing_table_type.meta") with self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, logger) + tables = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) #print("The exception is", context.exception) emsg = "Invalid section type, 'None'" @@ -179,11 +184,11 @@ def test_missing_table_type(self): def test_bad_table_type(self): """Test that a mismatched table type returns expected error""" known_ddts = list() - logger = None filename = os.path.join(SAMPLE_FILES_DIR, "test_bad_table_type.meta") with self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, logger) + tables = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) #print("The exception is", context.exception) emsg = "Section type, 'host', does not match table type, 'scheme'" @@ -192,11 +197,12 @@ def test_bad_table_type(self): def test_missing_table_name(self): """Test that a missing table name returns expected error""" known_ddts = list() - logger = None - filename = os.path.join(SAMPLE_FILES_DIR, "test_missing_table_name.meta") + filename = os.path.join(SAMPLE_FILES_DIR, + "test_missing_table_name.meta") with 
self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, logger) + tables = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) #print("The exception is", context.exception) emsg = "Section name, 'None', does not match table title, 'test_missing_table_name'" @@ -205,11 +211,11 @@ def test_missing_table_name(self): def test_bad_table_key(self): """Test that a bad table key returns expected error""" known_ddts = list() - logger = None filename = os.path.join(SAMPLE_FILES_DIR, "test_bad_table_key.meta") with self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, logger) + tables = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) #print("The exception is", context.exception) emsg = "Invalid metadata table start property, 'something', at " @@ -218,11 +224,11 @@ def test_bad_table_key(self): def test_bad_line_split(self): """Test that a bad split line with | returns expected error""" known_ddts = list() - logger = None filename = os.path.join(SAMPLE_FILES_DIR, "test_bad_line_split.meta") with self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, logger) + tables = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) #print("The exception is", context.exception) emsg = "Invalid variable property syntax, \'\', at " @@ -231,11 +237,11 @@ def test_bad_line_split(self): def test_unknown_ddt_type(self): """Test that a DDT type = banana returns expected error""" known_ddts = list() - logger = None filename = os.path.join(SAMPLE_FILES_DIR, "test_unknown_ddt_type.meta") with self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, logger) + tables = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) #print("The exception is", context.exception) emsg = "Unknown DDT type, banana, at " @@ -244,11 +250,11 @@ def test_unknown_ddt_type(self): def test_bad_var_property_name(self): 
"""Test that a ddt_type = None returns expected error""" known_ddts = list() - logger = None filename = os.path.join(SAMPLE_FILES_DIR, "test_bad_var_property_name.meta") with self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, logger) + tables = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) #print("The exception is", context.exception) emsg = "Invalid variable property name, 'none', at " @@ -257,7 +263,7 @@ def test_bad_var_property_name(self): def test_no_input(self): """Test that no input returns expected error""" with self.assertRaises(Exception) as context: - MetadataTable() + MetadataTable(self._DUMMY_RUN_ENV) #print("The exception is", context.exception) emsg = "MetadataTable requires a name" @@ -266,9 +272,10 @@ def test_no_input(self): def test_no_table_type(self): """Test that __init__ with table_type_in=None returns expected error""" with self.assertRaises(Exception) as context: - MetadataTable(table_name_in="something", table_type_in=None, dependencies=None, \ - relative_path=None, known_ddts=None, var_dict=None, module=None, \ - parse_object=None, logger=None) + MetadataTable(self._DUMMY_RUN_ENV, table_name_in="something", + table_type_in=None, dependencies=None, + relative_path=None, known_ddts=None, var_dict=None, + module=None, parse_object=None) #print("The exception is", context.exception) emsg = "MetadataTable requires a table type" @@ -277,9 +284,10 @@ def test_no_table_type(self): def test_bad_header_type(self): """Test that __init__ with table_type_in=banana returns expected error""" with self.assertRaises(Exception) as context: - MetadataTable(table_name_in="something", table_type_in="banana", dependencies=None, \ - relative_path=None, known_ddts=None, var_dict=None, module=None, \ - parse_object=None, logger=None) + MetadataTable(self._DUMMY_RUN_ENV, table_name_in="something", + table_type_in="banana", dependencies=None, + relative_path=None, known_ddts=None, var_dict=None, + 
module=None, parse_object=None) #print("The exception is", context.exception) emsg = "Invalid metadata arg table type, 'banana'" @@ -288,9 +296,10 @@ def test_bad_header_type(self): def test_no_module(self): """Test that __init__ with module=None returns expected error""" with self.assertRaises(Exception) as context: - MetadataTable(table_name_in=None, table_type_in=None, dependencies=None, \ - relative_path=None, known_ddts=None, var_dict=None, module=None, \ - parse_object=None, logger=None) + MetadataTable(self._DUMMY_RUN_ENV, table_name_in=None, + table_type_in=None, dependencies=None, + relative_path=None, known_ddts=None, var_dict=None, + module=None, parse_object=None) #print("The exception is", context.exception) emsg = "MetadataTable requires a name" @@ -299,11 +308,12 @@ def test_no_module(self): def test_bad_1st_ccpp_arg_table(self): """Test that first arg table named ccpp-farg-table returns expected error""" known_ddts = list() - logger = None - filename = os.path.join(SAMPLE_FILES_DIR, "test_bad_1st_arg_table_header.meta") + filename = os.path.join(SAMPLE_FILES_DIR, + "test_bad_1st_arg_table_header.meta") with self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, logger) + tables = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) #print("The exception is", context.exception) emsg = "Invalid variable property syntax, '[ccpp-farg-table]', at " @@ -312,24 +322,27 @@ def test_bad_1st_ccpp_arg_table(self): def test_bad_2nd_ccpp_arg_table(self): """Test that second arg table named ccpp-farg-table returns expected error""" known_ddts = list() - logger = None - filename = os.path.join(SAMPLE_FILES_DIR, "test_bad_2nd_arg_table_header.meta") + filename = os.path.join(SAMPLE_FILES_DIR, + "test_bad_2nd_arg_table_header.meta") with self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, logger) + tables = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) 
#print("The exception is", context.exception) emsg = "Invalid variable property syntax, '[ccpp-farg-table]', at " self.assertTrue(emsg in str(context.exception)) def test_mismatch_section_table_title(self): - """Test that mismatched section name and table title returns expected error""" + """Test that mismatched section name and table title + returns expected error""" known_ddts = list() - logger = None - filename = os.path.join(SAMPLE_FILES_DIR, "test_mismatch_section_table_title.meta") + filename = os.path.join(SAMPLE_FILES_DIR, + "test_mismatch_section_table_title.meta") with self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, logger) + tables = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) #print("The exception is", context.exception) emsg = "Section name, 'test_host', does not match table title, 'banana', at " @@ -338,11 +351,12 @@ def test_mismatch_section_table_title(self): def test_double_table_properties(self): """Test that duplicate ccpp-table-properties returns expected error""" known_ddts = list() - logger = None - filename = os.path.join(SAMPLE_FILES_DIR, "double_table_properties.meta") + filename = os.path.join(SAMPLE_FILES_DIR, + "double_table_properties.meta") with self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, logger) + tables = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) #print("The exception is", context.exception) emsg = "Duplicate metadata table, test_host, at " @@ -351,11 +365,12 @@ def test_double_table_properties(self): def test_missing_table_properties(self): """Test that a missing ccpp-table-properties returns expected error""" known_ddts = list() - logger = None - filename = os.path.join(SAMPLE_FILES_DIR, "missing_table_properties.meta") + filename = os.path.join(SAMPLE_FILES_DIR, + "missing_table_properties.meta") with self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, 
logger) + tables = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) #print("The exception is", context.exception) emsg = "Invalid CCPP metadata line, '[ccpp-arg-table]', at " @@ -364,10 +379,11 @@ def test_missing_table_properties(self): def test_dependencies_rel_path(self): """Test that relative_path and dependencies from ccpp-table-properties are read in correctly""" known_ddts = list() - logger = None - filename = os.path.join(SAMPLE_FILES_DIR, "test_dependencies_rel_path.meta") + filename = os.path.join(SAMPLE_FILES_DIR, + "test_dependencies_rel_path.meta") - result = parse_metadata_file(filename, known_ddts, logger) + result = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) dependencies = result[0].dependencies rel_path = result[0].relative_path @@ -387,11 +403,12 @@ def test_dependencies_rel_path(self): def test_invalid_table_properties_type(self): """Test that an invalid ccpp-table-properties type returns expected error""" known_ddts = list() - logger = None - filename = os.path.join(SAMPLE_FILES_DIR, "test_invalid_table_properties_type.meta") + filename = os.path.join(SAMPLE_FILES_DIR, + "test_invalid_table_properties_type.meta") with self.assertRaises(Exception) as context: - tables = parse_metadata_file(filename, known_ddts, logger) + tables = parse_metadata_file(filename, known_ddts, + self._DUMMY_RUN_ENV) #print("The exception is", context.exception) emsg = "Invalid metadata table type, 'banana', at " diff --git a/test/unit_tests/test_var_transforms.py b/test/unit_tests/test_var_transforms.py new file mode 100644 index 00000000..5178ce9b --- /dev/null +++ b/test/unit_tests/test_var_transforms.py @@ -0,0 +1,424 @@ +#! 
/usr/bin/env python3 +""" +----------------------------------------------------------------------- + Description: Contains unit tests for variable transforms involving + a VarCompatObj object + + Assumptions: + + Command line arguments: none + + Usage: python test_var_transform.py # run the unit tests +----------------------------------------------------------------------- +""" +import sys +import os +import unittest + +TEST_DIR = os.path.dirname(os.path.abspath(__file__)) +SCRIPTS_DIR = os.path.abspath(os.path.join(TEST_DIR, os.pardir, os.pardir, "scripts")) +SAMPLE_FILES_DIR = os.path.join(TEST_DIR, "sample_files") + +if not os.path.exists(SCRIPTS_DIR): + raise ImportError("Cannot find scripts directory") + +sys.path.append(SCRIPTS_DIR) + +# pylint: disable=wrong-import-position +from framework_env import CCPPFrameworkEnv +from metavar import Var +from parse_tools import ParseContext, ParseSource, ParseSyntaxError +from var_props import VarCompatObj +# pylint: enable=wrong-import-position + +class VarCompatTestCase(unittest.TestCase): + + """Tests for variable transforms.""" + + def _new_var(self, standard_name, units, dimensions, vtype, vkind=''): + """Create and return a new Var object with the requested properties""" + context = ParseContext(linenum=self.__linenum, filename="foo.meta") + source = ParseSource("foo", "host", context) + prop_dict = {'local_name' : f"foo{self.__linenum}", + 'standard_name' : standard_name, + 'units' : units, + 'dimensions' : f"({', '.join(dimensions)})", + 'type' : vtype, 'kind' : vkind} + self.__linenum += 5 + return Var(prop_dict, source, self.__run_env) + + def setUp(self): + """Setup variables for testing""" + self.__run_env = CCPPFrameworkEnv(None, ndict={'host_files':'', + 'scheme_files':'foo.meta', + 'suites':''}, + kind_types=["kind_phys=REAL64", + "kind_dyn=REAL32", + "kind_host=REAL64"]) + # For making variables unique + self.__linenum = 2 + # For assert messages + self.__inst_emsg = "Var.compatible returned a '{}', not 
a VarCompatObj" + + def test_equiv_vars(self): + """Test that equivalent variables are reported as equivalent""" + int_scalar1 = self._new_var('int_stdname1', 'm s-1', [], 'integer') + int_array1 = self._new_var('int_stdname2', 'm s-1', ['hdim'], + 'real', vkind='kind_phys') + int_array2 = self._new_var('int_stdname2', 'm s-1', ['hdim'], + 'real', vkind='kind_host') + int_array3 = self._new_var('int_stdname2', 'm s-1', ['hdim'], + 'real', vkind='REAL64') + compat = int_scalar1.compatible(int_scalar1, self.__run_env) + self.assertIsInstance(compat, VarCompatObj, + msg=self.__inst_emsg.format(type(compat))) + self.assertTrue(compat) + self.assertTrue(compat.compat) + self.assertEqual(compat.incompat_reason, '') + self.assertFalse(compat.has_kind_transforms) + self.assertFalse(compat.has_dim_transforms) + self.assertFalse(compat.has_unit_transforms) + compat = int_array1.compatible(int_array2, self.__run_env) + self.assertIsInstance(compat, VarCompatObj, + msg=self.__inst_emsg.format(type(compat))) + self.assertTrue(compat) + self.assertTrue(compat.compat) + self.assertEqual(compat.incompat_reason, '') + self.assertFalse(compat.has_kind_transforms) + self.assertFalse(compat.has_dim_transforms) + self.assertFalse(compat.has_unit_transforms) + compat = int_array3.compatible(int_array2, self.__run_env) + self.assertIsInstance(compat, VarCompatObj, + msg=self.__inst_emsg.format(type(compat))) + self.assertTrue(compat) + self.assertTrue(compat.compat) + self.assertEqual(compat.incompat_reason, '') + self.assertFalse(compat.has_kind_transforms) + self.assertFalse(compat.has_dim_transforms) + self.assertFalse(compat.has_unit_transforms) + + def test_incompatible_vars(self): + """Test that incompatible variables are reported correctly""" + int_scalar1 = self._new_var('int_stdname1', 'm s-1', [], 'integer') + int_scalar2 = self._new_var('int_stdname2', 'm s-1', [], 'integer') + int_array1 = self._new_var('int_stdname1', 'm s-1', ['hdim'], + 'integer') + real_array1 = 
self._new_var('int_stdname1', 'm s-1', ['hdim'], + 'real', vkind='kind_phys') + # Array and scalar + compat = int_scalar1.compatible(int_array1, self.__run_env) + self.assertIsInstance(compat, VarCompatObj, + msg=self.__inst_emsg.format(type(compat))) + self.assertFalse(compat) + self.assertFalse(compat.compat) + self.assertEqual(compat.incompat_reason, 'dimensions') + # Variables with different standard names + compat = int_scalar1.compatible(int_scalar2, self.__run_env) + self.assertIsInstance(compat, VarCompatObj, + msg=self.__inst_emsg.format(type(compat))) + self.assertFalse(compat) + self.assertFalse(compat.compat) + self.assertEqual(compat.incompat_reason, 'standard names') + # Variables with different types + compat = int_array1.compatible(real_array1, self.__run_env) + self.assertIsInstance(compat, VarCompatObj, + msg=self.__inst_emsg.format(type(compat))) + self.assertFalse(compat) + self.assertFalse(compat.compat) + self.assertEqual(compat.incompat_reason, 'types') + + def test_valid_unit_change(self): + """Test that valid unit changes are detected""" + real_scalar1 = self._new_var('real_stdname1', 'm', [], + 'real', vkind='kind_phys') + real_scalar2 = self._new_var('real_stdname1', 'mm', [], + 'real', vkind='kind_phys') + compat = real_scalar1.compatible(real_scalar2, self.__run_env) + self.assertIsInstance(compat, VarCompatObj, + msg=self.__inst_emsg.format(type(compat))) + self.assertFalse(compat) + self.assertTrue(compat.compat) + self.assertEqual(compat.incompat_reason, '') + self.assertFalse(compat.has_kind_transforms) + self.assertFalse(compat.has_dim_transforms) + self.assertTrue(compat.has_unit_transforms) + + real_array1 = self._new_var('real_stdname1', 'm s-1', ['hdim', 'vdim'], + 'real', vkind='kind_phys') + real_array2 = self._new_var('real_stdname1', 'km h-1', ['hdim', 'vdim'], + 'real', vkind='kind_phys') + compat = real_scalar1.compatible(real_scalar2, self.__run_env) + self.assertIsInstance(compat, VarCompatObj, + 
msg=self.__inst_emsg.format(type(compat))) + self.assertFalse(compat) + self.assertTrue(compat.compat) + self.assertEqual(compat.incompat_reason, '') + self.assertFalse(compat.has_kind_transforms) + self.assertFalse(compat.has_dim_transforms) + self.assertTrue(compat.has_unit_transforms) + + def test_unsupported_unit_change(self): + """Test that unsupported unit changes are detected""" + real_scalar1 = self._new_var('real_stdname1', 'min', [], + 'real', vkind='kind_phys') + real_scalar2 = self._new_var('real_stdname1', 'd', [], + 'real', vkind='kind_phys') + with self.assertRaises(ParseSyntaxError) as context: + compat = real_scalar1.compatible(real_scalar2, self.__run_env) + # end with + #Verify correct error message returned + emsg = "Unsupported unit conversion, 'min' to 'd' for 'real_stdname1'" + self.assertTrue(emsg in str(context.exception)) + + def test_valid_kind_change(self): + """Test that valid kind changes are detected""" + real_scalar1 = self._new_var('real_stdname1', 'mm', [], + 'real', vkind='kind_phys') + real_scalar2 = self._new_var('real_stdname1', 'mm', [], + 'real', vkind='kind_dyn') + compat = real_scalar1.compatible(real_scalar2, self.__run_env) + self.assertIsInstance(compat, VarCompatObj, + msg=self.__inst_emsg.format(type(compat))) + self.assertFalse(compat) + self.assertTrue(compat.compat) + self.assertEqual(compat.incompat_reason, '') + self.assertTrue(compat.has_kind_transforms) + self.assertFalse(compat.has_dim_transforms) + self.assertFalse(compat.has_unit_transforms) + + real_scalar1 = self._new_var('real_stdname1', 'm', [], + 'real', vkind='kind_phys') + real_scalar2 = self._new_var('real_stdname1', 'mm', [], + 'real', vkind='REAL32') + compat = real_scalar1.compatible(real_scalar2, self.__run_env) + self.assertIsInstance(compat, VarCompatObj, + msg=self.__inst_emsg.format(type(compat))) + self.assertFalse(compat) + self.assertTrue(compat.compat) + self.assertEqual(compat.incompat_reason, '') + 
self.assertTrue(compat.has_kind_transforms) + self.assertFalse(compat.has_dim_transforms) + self.assertTrue(compat.has_unit_transforms) + + def test_valid_dim_change(self): + """Test that valid dimension changes are detected""" + real_array1 = self._new_var('real_stdname1', 'C', + ['horizontal_dimension', + 'vertical_layer_dimension'], + 'real', vkind='kind_phys') + real_array2 = self._new_var('real_stdname1', 'K', + ['ccpp_constant_one:horizontal_loop_extent', + 'vertical_layer_dimension'], + 'real', vkind='kind_dyn') + compat = real_array1.compatible(real_array2, self.__run_env) + self.assertIsInstance(compat, VarCompatObj, + msg=self.__inst_emsg.format(type(compat))) + self.assertFalse(compat) + self.assertTrue(compat.compat) + self.assertEqual(compat.incompat_reason, '') + self.assertTrue(compat.has_kind_transforms) + self.assertTrue(compat.has_dim_transforms) + self.assertTrue(compat.has_unit_transforms) + + real_array1 = self._new_var('real_stdname1', 'C', + ['ccpp_constant_one:horizontal_dimension', + 'vertical_layer_dimension'], + 'real', vkind='kind_phys') + real_array2 = self._new_var('real_stdname1', 'K', + ['vertical_layer_dimension', + 'horizontal_loop_extent'], + 'real', vkind='kind_dyn') + compat = real_array1.compatible(real_array2, self.__run_env) + self.assertIsInstance(compat, VarCompatObj, + msg=self.__inst_emsg.format(type(compat))) + self.assertFalse(compat) + self.assertTrue(compat.compat) + self.assertEqual(compat.incompat_reason, '') + self.assertTrue(compat.has_kind_transforms) + self.assertTrue(compat.has_dim_transforms) + self.assertTrue(compat.has_unit_transforms) + + def test_valid_dim_transforms(self): + """Test that valid variable transform code is created""" + real_array1 = self._new_var('real_stdname1', 'C', + ['horizontal_dimension', + 'vertical_layer_dimension'], + 'real', vkind='kind_phys') + real_array2 = self._new_var('real_stdname1', 'C', + ['ccpp_constant_one:horizontal_loop_extent', + 'vertical_layer_dimension'], + 'real', 
vkind='kind_phys') + real_array3 = self._new_var('real_stdname1', 'K', + ['ccpp_constant_one:horizontal_loop_extent', + 'vertical_layer_dimension'], + 'real', vkind='kind_phys') + real_array4 = self._new_var('real_stdname1', 'K', + ['ccpp_constant_one:horizontal_loop_extent', + 'vertical_layer_dimension'], + 'real', vkind='kind_dyn') + real_array5 = self._new_var('real_stdname1', 'K', + ['vertical_layer_dimension', + 'ccpp_constant_one:horizontal_dimension'], + 'real', vkind='kind_phys') + v1_lname = real_array1.get_prop_value('local_name') + v2_lname = real_array2.get_prop_value('local_name') + v3_lname = real_array3.get_prop_value('local_name') + v4_lname = real_array4.get_prop_value('local_name') + v5_lname = real_array5.get_prop_value('local_name') + # Comparison between equivalent variables + compat = real_array1.compatible(real_array2, self.__run_env) + self.assertIsInstance(compat, VarCompatObj, + msg=self.__inst_emsg.format(type(compat))) + rindices = ("hind", "vind") + fwd_stmt = compat.forward_transform(v2_lname, v1_lname, rindices, + adjust_hdim=None, flip_vdim=None) + ind_str = ','.join(rindices) + expected = f"{v2_lname}({ind_str}) = {v1_lname}({ind_str})" + self.assertEqual(fwd_stmt, expected) + rev_stmt = compat.reverse_transform(v1_lname, v2_lname, rindices, + adjust_hdim=None, flip_vdim=None) + expected = f"{v1_lname}({ind_str}) = {v2_lname}({ind_str})" + self.assertEqual(rev_stmt, expected) + + # Comparison between equivalent variables with loop correction + compat = real_array1.compatible(real_array2, self.__run_env) + self.assertIsInstance(compat, VarCompatObj, + msg=self.__inst_emsg.format(type(compat))) + rindices = ("hind", "vind") + lindices = ("hind-col_start+1", "vind") + fwd_stmt = compat.forward_transform(v2_lname, v1_lname, rindices, + adjust_hdim='col_start', + flip_vdim=None) + lind_str = ','.join(lindices) + rind_str = ','.join(rindices) + expected = f"{v2_lname}({lind_str}) = {v1_lname}({rind_str})" + self.assertEqual(fwd_stmt, 
expected) + rev_stmt = compat.reverse_transform(v1_lname, v2_lname, rindices, + adjust_hdim='col_start', + flip_vdim=None) + lindices = ("hind+col_start-1", "vind") + lind_str = ','.join(lindices) + expected = f"{v1_lname}({lind_str}) = {v2_lname}({rind_str})" + self.assertEqual(rev_stmt, expected) + + # Comparison between equivalent variables with loop correction + # plus vertical flip + compat = real_array1.compatible(real_array2, self.__run_env) + self.assertIsInstance(compat, VarCompatObj, + msg=self.__inst_emsg.format(type(compat))) + rindices = ("hind", "vind") + lindices = ("hind-col_start+1", "pver-vind+1") + fwd_stmt = compat.forward_transform(v2_lname, v1_lname, rindices, + adjust_hdim='col_start', + flip_vdim='pver') + lind_str = ','.join(lindices) + rind_str = ','.join(rindices) + expected = f"{v2_lname}({lind_str}) = {v1_lname}({rind_str})" + self.assertEqual(fwd_stmt, expected) + rev_stmt = compat.reverse_transform(v1_lname, v2_lname, rindices, + adjust_hdim='col_start', + flip_vdim='pver') + lindices = ("hind+col_start-1", "pver-vind+1") + lind_str = ','.join(lindices) + expected = f"{v1_lname}({lind_str}) = {v2_lname}({rind_str})" + self.assertEqual(rev_stmt, expected) + + # Comparison between variables with different units + compat = real_array1.compatible(real_array3, self.__run_env) + self.assertIsInstance(compat, VarCompatObj, + msg=self.__inst_emsg.format(type(compat))) + rindices = ("hind", "vind") + lindices = ("hind-col_start+1", "vind") + conv = f"273.15_{real_array1.get_prop_value('kind')}" + fwd_stmt = compat.forward_transform(v3_lname, v1_lname, rindices, + adjust_hdim='col_start', + flip_vdim=None) + lind_str = ','.join(lindices) + rind_str = ','.join(rindices) + expected = f"{v3_lname}({lind_str}) = {v1_lname}({rind_str})+{conv}" + self.assertEqual(fwd_stmt, expected) + rev_stmt = compat.reverse_transform(v1_lname, v3_lname, rindices, + adjust_hdim='col_start', + flip_vdim=None) + lindices = ("hind+col_start-1", "vind") + lind_str = 
','.join(lindices) + conv = f"273.15_{real_array2.get_prop_value('kind')}" + expected = f"{v1_lname}({lind_str}) = {v3_lname}({rind_str})-{conv}" + self.assertEqual(rev_stmt, expected) + + # Comparison between variables with different kind + compat = real_array4.compatible(real_array3, self.__run_env) + self.assertIsInstance(compat, VarCompatObj, + msg=self.__inst_emsg.format(type(compat))) + rindices = ("hind", "vind") + lindices = ("hind", "vind") + fwd_stmt = compat.forward_transform(v4_lname, v3_lname, rindices, + adjust_hdim='col_start', + flip_vdim=None) + lind_str = ','.join(lindices) + rind_str = ','.join(rindices) + rkind = real_array3.get_prop_value('kind') + expected = f"{v4_lname}({lind_str}) = real({v3_lname}({rind_str}), {rkind})" + self.assertEqual(fwd_stmt, expected) + rev_stmt = compat.reverse_transform(v3_lname, v4_lname, rindices, + adjust_hdim='col_start', + flip_vdim=None) + lindices = ("hind", "vind") + lind_str = ','.join(lindices) + rkind = real_array4.get_prop_value('kind') + expected = f"{v3_lname}({lind_str}) = real({v4_lname}({rind_str}), {rkind})" + self.assertEqual(rev_stmt, expected) + + # Comparison between variables with different units and kind + compat = real_array1.compatible(real_array4, self.__run_env) + self.assertIsInstance(compat, VarCompatObj, + msg=self.__inst_emsg.format(type(compat))) + rindices = ("hind", "vind") + lindices = ("hind-col_start+1", "vind") + rkind = real_array4.get_prop_value('kind') + conv = f"273.15_{rkind}" + fwd_stmt = compat.forward_transform(v2_lname, v1_lname, rindices, + adjust_hdim='col_start', + flip_vdim=None) + lind_str = ','.join(lindices) + rind_str = ','.join(rindices) + expected = f"{v2_lname}({lind_str}) = real({v1_lname}({rind_str}), {rkind})+{conv}" + self.assertEqual(fwd_stmt, expected) + rev_stmt = compat.reverse_transform(v1_lname, v2_lname, rindices, + adjust_hdim='col_start', + flip_vdim=None) + lindices = ("hind+col_start-1", "vind") + lind_str = ','.join(lindices) + rkind = 
real_array1.get_prop_value('kind') + conv = f"273.15_{rkind}" + expected = f"{v1_lname}({lind_str}) = real({v2_lname}({rind_str}), {rkind})-{conv}" + self.assertEqual(rev_stmt, expected) + + # Comparison between variables with different dimension ordering + # and horizontal loop adjustment and vertical flip + compat = real_array5.compatible(real_array3, self.__run_env) + self.assertIsInstance(compat, VarCompatObj, + msg=self.__inst_emsg.format(type(compat))) + rindices = ("hind", "vind") + lindices = ("pver-vind+1", "hind-col_start+1") + fwd_stmt = compat.forward_transform(v4_lname, v5_lname, rindices, + adjust_hdim='col_start', + flip_vdim='pver') + lind_str = ','.join(lindices) + rind_str = ','.join(rindices) + rkind = real_array3.get_prop_value('kind') + expected = f"{v4_lname}({lind_str}) = {v5_lname}({rind_str})" + self.assertEqual(fwd_stmt, expected) + rindices = ("vind", "hind") + rind_str = ','.join(rindices) + rev_stmt = compat.reverse_transform(v5_lname, v4_lname, rindices, + adjust_hdim='col_start', + flip_vdim='pver') + lindices = ("hind+col_start-1", "pver-vind+1") + lind_str = ','.join(lindices) + rkind = real_array4.get_prop_value('kind') + expected = f"{v5_lname}({lind_str}) = {v4_lname}({rind_str})" + self.assertEqual(rev_stmt, expected) + +if __name__ == '__main__': + unittest.main() From bbb668c5cdaac8a647d2e3d8c0783438e68f6a54 Mon Sep 17 00:00:00 2001 From: Steve Goldhaber <goldy@ucar.edu> Date: Wed, 27 Oct 2021 12:11:18 -0600 Subject: [PATCH 2/2] add optional var test, doctest cleanup, and python3 checks --- scripts/ccpp_suite.py | 11 +++++-- scripts/code_block.py | 6 +++- scripts/ddt_library.py | 8 ++++-- scripts/fortran_tools/fortran_write.py | 26 ++++++++--------- scripts/fortran_tools/parse_fortran.py | 6 +++- scripts/fortran_tools/parse_fortran_file.py | 15 +++++----- scripts/host_cap.py | 8 ++++-- scripts/host_model.py | 12 ++++---- scripts/metadata_table.py | 6 +++- scripts/metavar.py | 20 +++++++++---- 
scripts/parse_tools/parse_checkers.py | 6 +++- scripts/parse_tools/parse_object.py | 7 ++++- scripts/parse_tools/parse_source.py | 6 +++- scripts/parse_tools/preprocess.py | 6 +++- scripts/parse_tools/xml_tools.py | 9 ++++-- scripts/state_machine.py | 7 ++++- scripts/suite_objects.py | 8 ++++-- scripts/var_props.py | 7 ++++- test/advection_test/run_test | 2 +- test/advection_test/test_reports.py | 7 ++++- test/capgen_test/run_test | 2 +- test/capgen_test/temp_adjust.F90 | 6 ++-- test/capgen_test/test_reports.py | 7 ++++- test/run_doctest.sh | 6 ++++ test/run_tests.sh | 32 +++++++++++++++++---- 25 files changed, 175 insertions(+), 61 deletions(-) diff --git a/scripts/ccpp_suite.py b/scripts/ccpp_suite.py index 4fca8e05..8b9a1aeb 100644 --- a/scripts/ccpp_suite.py +++ b/scripts/ccpp_suite.py @@ -1145,8 +1145,11 @@ def suites(self): if __name__ == "__main__": try: # First, run doctest + # pylint: disable=ungrouped-imports import doctest - doctest.testmod() + import sys + # pylint: enable=ungrouped-imports + fail, _ = doctest.testmod() # Goal: Replace this test with a suite from unit tests FRAME_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) TEMP_SUITE = os.path.join(FRAME_ROOT, 'test', 'capgen_test', @@ -1157,6 +1160,10 @@ def suites(self): _API_DUMMY_RUN_ENV) else: print("Cannot find test file, '{}', skipping test".format(TEMP_SUITE)) + # end if + sys.exit(fail) except CCPPError as suite_error: print("{}".format(suite_error)) -# end if (no else) + sys.exit(fail) + # end try +# end if diff --git a/scripts/code_block.py b/scripts/code_block.py index 391247e3..96dc30e9 100644 --- a/scripts/code_block.py +++ b/scripts/code_block.py @@ -114,11 +114,15 @@ def write(self, outfile, indent_level, var_dict): # pylint: disable=ungrouped-imports import doctest import os + import sys from fortran_tools import FortranWriter + # pylint: enable=ungrouped-imports outfile_name = "__code_block_temp.F90" with FortranWriter(outfile_name, 'w', 'test file', 'test_mod') 
as outfile: - doctest.testmod() + fail, _ = doctest.testmod() # end with if os.path.exists(outfile_name): os.remove(outfile_name) # end if + sys.exit(fail) +# end if diff --git a/scripts/ddt_library.py b/scripts/ddt_library.py index 876d2294..30614226 100644 --- a/scripts/ddt_library.py +++ b/scripts/ddt_library.py @@ -8,7 +8,6 @@ """ # Python library imports -from __future__ import print_function import logging # CCPP framework imports from parse_tools import ParseInternalError, CCPPError, context_string @@ -328,5 +327,10 @@ def run_env(self): ############################################################################### if __name__ == "__main__": + # pylint: disable=ungrouped-imports import doctest - doctest.testmod() + import sys + # pylint: enable=ungrouped-imports + fail, _ = doctest.testmod() + sys.exit(fail) +# end if diff --git a/scripts/fortran_tools/fortran_write.py b/scripts/fortran_tools/fortran_write.py index ab8c9d30..a238dcc5 100644 --- a/scripts/fortran_tools/fortran_write.py +++ b/scripts/fortran_tools/fortran_write.py @@ -4,10 +4,6 @@ """Code to write Fortran code """ -# Python library imports -from __future__ import print_function -# CCPP framework imports - class FortranWriter(object): """Class to turn output into properly continued and indented Fortran code >>> FortranWriter("foo.F90", 'r', 'test', 'mod_name') #doctest: +IGNORE_EXCEPTION_DETAIL @@ -259,17 +255,18 @@ def copyright(cls): ############################################################################### if __name__ == "__main__": # First, run doctest + # pylint: disable=ungrouped-imports import doctest - doctest.testmod() - # Make sure we can write a file - import sys import os - import os.path + import sys + # pylint: enable=ungrouped-imports + fail, _ = doctest.testmod() + # Make sure we can write a file sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) NAME = 'foo' while os.path.exists(NAME+'.F90'): NAME = NAME + 'xo' - # End while + # end while NAME = 
NAME + '.F90' if os.access(os.getcwd(), os.W_OK): _CHECK = FortranWriter.copyright().split('\n') @@ -301,11 +298,12 @@ def copyright(cls): print(EMSG.format(_line_num+1)) print("{}".format(_statement.rstrip())) print("{}".format(_CHECK[_line_num])) - # End if - # End for - # End with + # end if + # end for + # end with os.remove(NAME) else: print("WARNING: Unable to write test file, '{}'".format(NAME)) - # End if -# No else + # end if + sys.exit(fail) +# end if diff --git a/scripts/fortran_tools/parse_fortran.py b/scripts/fortran_tools/parse_fortran.py index 2556e60f..624f02cb 100644 --- a/scripts/fortran_tools/parse_fortran.py +++ b/scripts/fortran_tools/parse_fortran.py @@ -828,9 +828,13 @@ def parse_fortran_var_decl(line, source, run_env): ######################################################################## if __name__ == "__main__": + # pylint: disable=ungrouped-imports import doctest + # pylint: enable=ungrouped-imports from framework_env import CCPPFrameworkEnv _DUMMY_RUN_ENV = CCPPFrameworkEnv(None, ndict={'host_files':'', 'scheme_files':'', 'suites':''}) - doctest.testmod() + fail, _ = doctest.testmod() + sys.exit(fail) +# end if diff --git a/scripts/fortran_tools/parse_fortran_file.py b/scripts/fortran_tools/parse_fortran_file.py index 4054a85e..9082f614 100644 --- a/scripts/fortran_tools/parse_fortran_file.py +++ b/scripts/fortran_tools/parse_fortran_file.py @@ -959,11 +959,11 @@ def parse_fortran_file(filename, run_env): ######################################################################## if __name__ == "__main__": -# pylint: disable=ungrouped-imports + # pylint: disable=ungrouped-imports import doctest - doctest.testmod() + fail, _ = doctest.testmod() from parse_tools import register_fortran_ddt_name -# pylint: enable=ungrouped-imports + # pylint: enable=ungrouped-imports _FPATH = '/Users/goldy/scratch/foo' _FNAMES = ['GFS_PBL_generic.F90', 'GFS_rad_time_vary.fv3.F90', 'GFS_typedefs.F90'] @@ -975,7 +975,8 @@ def parse_fortran_file(filename, 
run_env): mh = parse_fortran_file(fpathname, preproc_defs={'CCPP':1}) for header in mheader: print('{}: {}'.format(fname, h)) - # End for - # End if - # End for -# End if + # end for + # end if + # end for + sys.exit(fail) +# end if diff --git a/scripts/host_cap.py b/scripts/host_cap.py index 2408209d..9c88cf34 100644 --- a/scripts/host_cap.py +++ b/scripts/host_cap.py @@ -579,6 +579,10 @@ def write_host_cap(host_model, api, output_dir, run_env): _LOGGER = init_log('host_registry') set_log_to_null(_LOGGER) # Run doctest + # pylint: disable=ungrouped-imports import doctest - doctest.testmod() -# No else: + import sys + # pylint: enable=ungrouped-imports + fail, _ = doctest.testmod() + sys.exit(fail) +# end if diff --git a/scripts/host_model.py b/scripts/host_model.py index cd0822a1..eae2479d 100644 --- a/scripts/host_model.py +++ b/scripts/host_model.py @@ -4,8 +4,6 @@ Parse a host-model registry XML file and return the captured variables. """ -# Python library imports -from __future__ import print_function # CCPP framework imports from metavar import VarDictionary from ddt_library import VarDDT, DDTLibrary @@ -304,10 +302,14 @@ def call_list(self, phase): ############################################################################### if __name__ == "__main__": + # pylint: disable=ungrouped-imports from parse_tools import init_log, set_log_to_null + import doctest + import sys + # pylint: enable=ungrouped-imports _LOGGER = init_log('host_registry') set_log_to_null(_LOGGER) # First, run doctest - import doctest - doctest.testmod() -# No else: + fail, _ = doctest.testmod() + sys.exit(fail) +# end if diff --git a/scripts/metadata_table.py b/scripts/metadata_table.py index 55f97266..86397386 100644 --- a/scripts/metadata_table.py +++ b/scripts/metadata_table.py @@ -1269,9 +1269,13 @@ def is_scalar_reference(test_val): ######################################################################## if __name__ == "__main__": +# pylint: enable=ungrouped-imports import doctest + 
import sys +# pylint: disable=ungrouped-imports from framework_env import CCPPFrameworkEnv _DUMMY_RUN_ENV = CCPPFrameworkEnv(None, {'host_files':'', 'scheme_files':'', 'suites':''}) - doctest.testmod() + fail, _ = doctest.testmod() + sys.exit(fail) diff --git a/scripts/metavar.py b/scripts/metavar.py index 574c89ae..a66ff327 100755 --- a/scripts/metavar.py +++ b/scripts/metavar.py @@ -805,14 +805,14 @@ def parent(self, parent_var): # end if if isinstance(parent_var, Var): self.__parent_var = parent_var - parent_var._add_child(self) + parent_var.add_child(self) else: emsg = 'Attempting to set parent for {}, bad parent type, {}' lname = self.get_prop_value('local_name') raise ParseInternalError(emsg.format(lname, type(parent_var))) # end if - def _add_child(self, cvar): + def add_child(self, cvar): """Add <cvar> as a child of this Var object""" if cvar not in self.__children: self.__children.append(cvar) @@ -824,8 +824,8 @@ def children(self): children = self.__children if not children: pvar = self - while (not children) and pvar.__clone_source: - pvar = pvar.__clone_source + while (not children) and pvar.clone_source: + pvar = pvar.clone_source children = pvar.children() # end while # end if @@ -853,6 +853,11 @@ def source(self, new_source): raise ParseInternalError(errmsg.format(stdname, lname, new_source)) # end if + @property + def clone_source(self): + """Return this Var object's clone source (or None)""" + return self.__clone_source + @property def host_interface_var(self): """True iff self is included in the host model interface calls""" @@ -1965,5 +1970,10 @@ def new_internal_variable_name(self, prefix=None, max_len=63): ############################################################################### if __name__ == "__main__": + # pylint: disable=ungrouped-imports import doctest - doctest.testmod() + import sys + # pylint: enable=ungrouped-imports + fail, _ = doctest.testmod() + sys.exit(fail) +# end if diff --git a/scripts/parse_tools/parse_checkers.py 
b/scripts/parse_tools/parse_checkers.py index a3b4f185..487478e6 100755 --- a/scripts/parse_tools/parse_checkers.py +++ b/scripts/parse_tools/parse_checkers.py @@ -1023,5 +1023,9 @@ def register_fortran_ddt_name(name): ######################################################################## if __name__ == "__main__": + # pylint: disable=ungrouped-imports import doctest - doctest.testmod() + # pylint: enable=ungrouped-imports + fail, _ = doctest.testmod() + sys.exit(fail) +# end if diff --git a/scripts/parse_tools/parse_object.py b/scripts/parse_tools/parse_object.py index bf489de8..4518d75c 100644 --- a/scripts/parse_tools/parse_object.py +++ b/scripts/parse_tools/parse_object.py @@ -172,5 +172,10 @@ def __del__(self): ######################################################################## if __name__ == "__main__": + # pylint: disable=ungrouped-imports import doctest - doctest.testmod() + import sys + # pylint: enable=ungrouped-imports + fail, _ = doctest.testmod() + sys.exit(fail) +# end if diff --git a/scripts/parse_tools/parse_source.py b/scripts/parse_tools/parse_source.py index dd57b4ec..6d28b694 100644 --- a/scripts/parse_tools/parse_source.py +++ b/scripts/parse_tools/parse_source.py @@ -415,5 +415,9 @@ def context(self): ######################################################################## if __name__ == "__main__": + # pylint: disable=ungrouped-imports import doctest - doctest.testmod() + # pylint: enable=ungrouped-imports + fail, _ = doctest.testmod() + sys.exit(fail) +# end if diff --git a/scripts/parse_tools/preprocess.py b/scripts/parse_tools/preprocess.py index 96a52089..06b94147 100755 --- a/scripts/parse_tools/preprocess.py +++ b/scripts/parse_tools/preprocess.py @@ -416,6 +416,10 @@ def is_preproc_line(line): ######################################################################## if __name__ == "__main__": + # pylint: disable=ungrouped-imports import doctest - doctest.testmod() + import sys + # pylint: enable=ungrouped-imports + fail, _ = 
doctest.testmod() + sys.exit(fail) # end if diff --git a/scripts/parse_tools/xml_tools.py b/scripts/parse_tools/xml_tools.py index 46156c64..414ffc5a 100644 --- a/scripts/parse_tools/xml_tools.py +++ b/scripts/parse_tools/xml_tools.py @@ -356,8 +356,13 @@ def write(self, file, encoding="us-ascii", xml_declaration=None, set_log_to_null(_LOGGER) try: # First, run doctest + # pylint: disable=ungrouped-imports import doctest - doctest.testmod() + # pylint: enable=ungrouped-imports + fail, _ = doctest.testmod() + sys.exit(fail) except CCPPError as cerr: print("{}".format(cerr)) -# No else: + sys.exit(fail) + # end try +# end if diff --git a/scripts/state_machine.py b/scripts/state_machine.py index f802d7bd..966ad04f 100644 --- a/scripts/state_machine.py +++ b/scripts/state_machine.py @@ -183,5 +183,10 @@ def __repr__(self): ############################################################################### if __name__ == "__main__": + # pylint: disable=ungrouped-imports import doctest - doctest.testmod() + import sys + # pylint: enable=ungrouped-imports + fail, _ = doctest.testmod() + sys.exit(fail) +# end if diff --git a/scripts/suite_objects.py b/scripts/suite_objects.py index caa19ec8..472556cb 100644 --- a/scripts/suite_objects.py +++ b/scripts/suite_objects.py @@ -1903,6 +1903,10 @@ def suite_dicts(self): if __name__ == "__main__": # First, run doctest + # pylint: disable=ungrouped-imports import doctest - doctest.testmod() -# end if (no else) + import sys + # pylint: enable=ungrouped-imports + fail, _ = doctest.testmod() + sys.exit(fail) +# end if diff --git a/scripts/var_props.py b/scripts/var_props.py index 958c98e0..b7d7b16f 100644 --- a/scripts/var_props.py +++ b/scripts/var_props.py @@ -1271,8 +1271,11 @@ def __bool__(self): ############################################################################### if __name__ == "__main__": + # pylint: disable=ungrouped-imports import doctest + import sys from parse_tools import init_log, set_log_to_null + # pylint: 
enable=ungrouped-imports _DOCTEST_LOGGING = init_log('var_props') set_log_to_null(_DOCTEST_LOGGING) _DOCTEST_RUNENV = CCPPFrameworkEnv(_DOCTEST_LOGGING, @@ -1286,4 +1289,6 @@ def __bool__(self): "m", [], "var1_lname", "var_stdname", "real", "kind_phys", "m", [], "var2_lname", _DOCTEST_RUNENV) - doctest.testmod() + fail, _ = doctest.testmod() + sys.exit(fail) +# end if diff --git a/test/advection_test/run_test b/test/advection_test/run_test index d1cbed2b..b014470a 100755 --- a/test/advection_test/run_test +++ b/test/advection_test/run_test @@ -199,7 +199,7 @@ fi report_prog="${framework}/scripts/ccpp_datafile.py" datafile="${build_dir}/ccpp/datatable.xml" echo "Running python interface tests" -python ${scriptdir}/test_reports.py ${build_dir} ${datafile} +python3 ${scriptdir}/test_reports.py ${build_dir} ${datafile} res=$? if [ $res -ne 0 ]; then perr "python interface tests failed" diff --git a/test/advection_test/test_reports.py b/test/advection_test/test_reports.py index 00e490a6..c28fe38a 100644 --- a/test/advection_test/test_reports.py +++ b/test/advection_test/test_reports.py @@ -1,4 +1,4 @@ -#! /usr/bin/env python +#! 
/usr/bin/env python3 """ ----------------------------------------------------------------------- Description: Test advection database report python interface @@ -21,6 +21,11 @@ raise ImportError("Cannot find scripts directory") # end if +if ((sys.version_info[0] < 3) or + (sys.version_info[0] == 3) and (sys.version_info[1] < 7)): + raise Exception("Python 3.7 or greater required") +# end if + sys.path.append(_SCRIPTS_DIR) # pylint: disable=wrong-import-position from ccpp_datafile import datatable_report, DatatableReport diff --git a/test/capgen_test/run_test b/test/capgen_test/run_test index 85f10282..4dfd490f 100755 --- a/test/capgen_test/run_test +++ b/test/capgen_test/run_test @@ -218,7 +218,7 @@ fi report_prog="${framework}/scripts/ccpp_datafile.py" datafile="${build_dir}/ccpp/datatable.xml" echo "Running python interface tests" -python ${scriptdir}/test_reports.py ${build_dir} ${datafile} +python3 ${scriptdir}/test_reports.py ${build_dir} ${datafile} res=$? if [ $res -ne 0 ]; then perr "python interface tests failed" diff --git a/test/capgen_test/temp_adjust.F90 b/test/capgen_test/temp_adjust.F90 index 52379e22..356a86d1 100644 --- a/test/capgen_test/temp_adjust.F90 +++ b/test/capgen_test/temp_adjust.F90 @@ -27,7 +27,7 @@ subroutine temp_adjust_run(foo, timestep, temp_prev, temp_layer, qv, ps, & REAL(kind_phys), intent(in) :: temp_prev(:) REAL(kind_phys), intent(inout) :: temp_layer(foo) character(len=512), intent(out) :: errmsg - integer, intent(out) :: errflg + integer, optional, intent(out) :: errflg real(kind_phys), optional, intent(in) :: innie real(kind_phys), optional, intent(out) :: outie real(kind_phys), optional, intent(inout) :: optsie @@ -36,7 +36,9 @@ subroutine temp_adjust_run(foo, timestep, temp_prev, temp_layer, qv, ps, & integer :: col_index errmsg = '' - errflg = 0 + if (present(errflg)) then + errflg = 0 + end if do col_index = 1, foo temp_layer(col_index) = temp_layer(col_index) + temp_prev(col_index) diff --git 
a/test/capgen_test/test_reports.py b/test/capgen_test/test_reports.py index 45e7c14d..3749e8ac 100644 --- a/test/capgen_test/test_reports.py +++ b/test/capgen_test/test_reports.py @@ -1,4 +1,4 @@ -#! /usr/bin/env python +#! /usr/bin/env python3 """ ----------------------------------------------------------------------- Description: Test capgen database report python interface @@ -22,6 +22,11 @@ raise ImportError("Cannot find scripts directory") # end if +if ((sys.version_info[0] < 3) or + (sys.version_info[0] == 3) and (sys.version_info[1] < 7)): + raise Exception("Python 3.7 or greater required") +# end if + sys.path.append(_SCRIPTS_DIR) # pylint: disable=wrong-import-position from ccpp_datafile import datatable_report, DatatableReport diff --git a/test/run_doctest.sh b/test/run_doctest.sh index 1eecb32f..aeecb133 100755 --- a/test/run_doctest.sh +++ b/test/run_doctest.sh @@ -18,12 +18,18 @@ perr() { cd ${scripts} perr $? "Cannot cd to scripts directory, '${scripts}'" +errcnt=0 + export PYTHONPATH="${scripts}:${PYTHONPATH}" # Find all python scripts that have doctest for pyfile in $(find . -name \*.py); do if [ -f "${pyfile}" ]; then if [ $(grep -c doctest ${pyfile}) -ne 0 ]; then python3 ${pyfile} + res=$? + errcnt=$((errcnt + res)) fi fi done + +exit ${errcnt} diff --git a/test/run_tests.sh b/test/run_tests.sh index 83b89c73..90a3ebaa 100755 --- a/test/run_tests.sh +++ b/test/run_tests.sh @@ -19,22 +19,44 @@ perr() { cd ${test_dir} perr $? "Cannot cd to test directory, '${test_dir}'" +errcnt=0 + # Run capgen test ./capgen_test/run_test -perr $? "Failure running capgen test" +res=$? +errcnt=$((errcnt + res)) +if [ $res -ne 0 ]; then + echo "Failure running capgen test" +fi # Run advection test ./advection_test/run_test -perr $? "Failure running advection test" +res=$? +errcnt=$((errcnt + res)) +if [ $res -ne 0 ]; then + echo "Failure running advection test" +fi # Run doctests ./run_doctest.sh -perr $? "Failure running doctests" +res=$? 
+errcnt=$((errcnt + res)) +if [ $res -ne 0 ]; then + echo "${errcnt} doctest failures" +fi for test in `ls unit_tests/test_*.py`; do echo "Running unit test, ${test}" python3 ${test} - perr $? "Failure running unit test, ${test}" + res=$? + errcnt=$((errcnt + res)) + if [ $res -ne 0 ]; then + echo "Failure, '${res}', running unit test, ${test}" + fi done -echo "All tests PASSed!" +if [ $errcnt -eq 0 ]; then + echo "All tests PASSed!" +else + echo "${errcnt} tests FAILed" +fi