diff --git a/.gitignore b/.gitignore index 4a589524d2..42b4e94c2c 100644 --- a/.gitignore +++ b/.gitignore @@ -35,7 +35,6 @@ pip-cache # Created by Iris build *.so lib/iris/etc/site.cfg -lib/iris/fileformats/_pyke_rules/compiled_krb/ lib/iris/std_names.py # Iris test result files diff --git a/MANIFEST.in b/MANIFEST.in index 1902f6a74f..62f9dc701b 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -3,9 +3,7 @@ include CHANGES COPYING COPYING.LESSER # Files from setup.py package_data that are not automatically added to source distributions recursive-include lib/iris/tests/results *.cml *.cdl *.txt *.xml *.json -recursive-exclude lib/iris/fileformats/_pyke_rules/compiled_krb * recursive-include lib/iris/etc * -include lib/iris/fileformats/_pyke_rules/*.krb recursive-include requirements * diff --git a/asv.conf.json b/asv.conf.json index 46cd4839f2..92634b40c5 100644 --- a/asv.conf.json +++ b/asv.conf.json @@ -20,7 +20,6 @@ "scipy": [], "setuptools": [], - "pyke": [], "six": [], "nose": [], diff --git a/docs/src/developers_guide/release.rst b/docs/src/developers_guide/release.rst index 0f2340e077..896cc582f1 100644 --- a/docs/src/developers_guide/release.rst +++ b/docs/src/developers_guide/release.rst @@ -104,7 +104,7 @@ To do this perform the following steps. Create a conda environment with the appropriate conda packages to build the source distribution (``sdist``) and pure Python wheel (``bdist_wheel``):: - > conda create -n iris-pypi -c conda-forge --yes pip pyke python setuptools twine wheel + > conda create -n iris-pypi -c conda-forge --yes pip python setuptools twine wheel > . activate iris-pypi Checkout the appropriate Iris ```` tag from the appropriate ````. diff --git a/docs/src/installing.rst b/docs/src/installing.rst index b55bfa967f..1bea781e5a 100644 --- a/docs/src/installing.rst +++ b/docs/src/installing.rst @@ -70,7 +70,7 @@ The rest can be done with pip. Begin with numpy:: Finally, Iris and its Python dependencies can be installed with the following command:: - pip3 install setuptools cftime==1.2.1 cf-units scitools-pyke scitools-iris + pip3 install setuptools cftime==1.2.1 cf-units scitools-iris This procedure was tested on a Ubuntu 20.04 system on the 27th of January, 2021. diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/__init__.py b/lib/iris/fileformats/_nc_load_rules/__init__.py similarity index 52% rename from lib/iris/tests/unit/fileformats/pyke_rules/__init__.py rename to lib/iris/fileformats/_nc_load_rules/__init__.py index 71d129e4a7..b102a082df 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/__init__.py +++ b/lib/iris/fileformats/_nc_load_rules/__init__.py @@ -3,4 +3,10 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -"""Unit tests for the :mod:`iris.fileformats._pyke_rules` module.""" +""" +Support for cube-specific CF-to-Iris translation operations. + +Interprets CF concepts identified by :mod:`iris.fileformats.cf` to add +components into loaded cubes. + +""" diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py new file mode 100644 index 0000000000..5813c5bca7 --- /dev/null +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -0,0 +1,557 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Replacement code for the Pyke rules. 
+ +For now, we are still emulating various aspects of how our original Pyke-based +code used the Pyke 'engine' to hold translation data, both Pyke-specific and +not : +1) basic details from the iris.fileformats.cf analysis of the file are + recorded before translating each output cube, using + "engine.assert_case_specific_fact(name, args)". + +2) this is also used to store intermediate info passed between rules, which + used to be done with a "facts_cf.provides" statement in rule actions. + +3) Iris-specific info is (still) stored in additional properties created on + the engine object : + engine.cf_var, .cube, .cube_parts, .requires, .rule_triggered, .filename + +Our "rules" are just action routines. +The top-level 'run_actions' routine decides which actions to call, based on the +info recorded when processing each cube output. It does this in a simple +explicit way, which doesn't use any clever chaining, "trigger conditions" or +other rule-type logic. + +Each 'action' function can replace several similar 'rules'. +E.G. 'action_provides_grid_mapping' replaces all 'fc_provides_grid_mapping_'. +To aid debug, each returns a 'rule_name' string, indicating which original rule +this particular action call is emulating : In some cases, this may include a +textual note that this rule 'failed', aka "did not trigger", which would not be +recorded in the original implementation. + +TODO: remove the use of intermediate "facts" to carry information between +actions. This mimics older behaviour, so is still useful while we are still +comparing behaviour with the old Pyke rules (debugging). But once that is no +longer useful, this can be considerably simplified. + +""" + +from functools import wraps +import warnings + +import iris.fileformats.cf +import iris.fileformats.pp as pp + +from . import helpers as hh + + +def _default_rulenamesfunc(func_name): + # A simple default function to deduce the rules-name from an action-name. + funcname_prefix = "action_" + rulename_prefix = "fc_" # To match existing behaviours + rule_name = func_name + if rule_name.startswith(funcname_prefix): + rule_name = rule_name[len(funcname_prefix) :] + if not rule_name.startswith(rulename_prefix): + rule_name = rulename_prefix + rule_name + return rule_name + + +def action_function(func): + # Wrap an action function with some standard behaviour. + # Notably : engages with the rules logging process. + @wraps(func) + def inner(engine, *args, **kwargs): + # Call the original rules-func + rule_name = func(engine, *args, **kwargs) + if rule_name is None: + # Work out the corresponding rule name, and log it. + # Note: an action returns a name string, which identifies it, + # but also may vary depending on whether it successfully + # triggered, and if so what it matched. + rule_name = _default_rulenamesfunc(func.__name__) + engine.rule_triggered.add(rule_name) + + func._rulenames_func = _default_rulenamesfunc + return inner + + +@action_function +def action_default(engine): + """Standard operations for every cube.""" + hh.build_cube_metadata(engine) + + +# Lookup table used by 'action_provides_grid_mapping'. +# Maps each supported CF grid-mapping-name to a pair of handling ("helper") +# routines: +# (@0) a validity-checker (or None) +# (@1) a coord-system builder function. 
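+# E.G. the "mercator" entry pairs 'has_supported_mercator_parameters' (@0) with
+# 'build_mercator_coordinate_system' (@1) : 'action_provides_grid_mapping' runs
+# the checker first, where one is given, and only calls the builder when that
+# check passes.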
+_GRIDTYPE_CHECKER_AND_BUILDER = { + hh.CF_GRID_MAPPING_LAT_LON: (None, hh.build_coordinate_system), + hh.CF_GRID_MAPPING_ROTATED_LAT_LON: ( + None, + hh.build_rotated_coordinate_system, + ), + hh.CF_GRID_MAPPING_MERCATOR: ( + hh.has_supported_mercator_parameters, + hh.build_mercator_coordinate_system, + ), + hh.CF_GRID_MAPPING_TRANSVERSE: ( + None, + hh.build_transverse_mercator_coordinate_system, + ), + hh.CF_GRID_MAPPING_STEREO: ( + hh.has_supported_stereographic_parameters, + hh.build_stereographic_coordinate_system, + ), + hh.CF_GRID_MAPPING_LAMBERT_CONFORMAL: ( + None, + hh.build_lambert_conformal_coordinate_system, + ), + hh.CF_GRID_MAPPING_LAMBERT_AZIMUTHAL: ( + None, + hh.build_lambert_azimuthal_equal_area_coordinate_system, + ), + hh.CF_GRID_MAPPING_ALBERS: ( + None, + hh.build_albers_equal_area_coordinate_system, + ), + hh.CF_GRID_MAPPING_VERTICAL: ( + None, + hh.build_vertical_perspective_coordinate_system, + ), + hh.CF_GRID_MAPPING_GEOSTATIONARY: ( + None, + hh.build_geostationary_coordinate_system, + ), +} + + +@action_function +def action_provides_grid_mapping(engine, gridmapping_fact): + """Convert a CFGridMappingVariable into a cube coord-system.""" + (var_name,) = gridmapping_fact + rule_name = "fc_provides_grid_mapping" + cf_var = engine.cf_var.cf_group[var_name] + grid_mapping_type = getattr(cf_var, hh.CF_ATTR_GRID_MAPPING_NAME, None) + + succeed = True + if grid_mapping_type is None: + succeed = False + rule_name += " --FAILED(no grid-mapping attr)" + else: + grid_mapping_type = grid_mapping_type.lower() + + if succeed: + if grid_mapping_type in _GRIDTYPE_CHECKER_AND_BUILDER: + checker, builder = _GRIDTYPE_CHECKER_AND_BUILDER[grid_mapping_type] + rule_name += f"_({grid_mapping_type})" + else: + succeed = False + rule_name += f" --FAILED(unhandled type {grid_mapping_type})" + + if succeed: + if checker is not None and not checker(engine, var_name): + succeed = False + rule_name += f" --(FAILED check {checker.__name__})" + + if succeed: + coordinate_system = builder(engine, cf_var) + engine.cube_parts["coordinate_system"] = coordinate_system + + # Check there is not an existing one. + # ATM this is guaranteed by the caller, "run_actions". + assert engine.fact_list("grid-type") == [] + + engine.add_fact("grid-type", (grid_mapping_type,)) + + return rule_name + + +@action_function +def action_provides_coordinate(engine, dimcoord_fact): + """Identify the coordinate 'type' of a CFCoordinateVariable.""" + (var_name,) = dimcoord_fact + + # Identify the "type" of a coordinate variable + coord_type = None + # NOTE: must test for rotated cases *first*, as 'is_longitude' and + # 'is_latitude' functions also accept rotated cases. + if hh.is_rotated_latitude(engine, var_name): + coord_type = "rotated_latitude" + elif hh.is_rotated_longitude(engine, var_name): + coord_type = "rotated_longitude" + elif hh.is_latitude(engine, var_name): + coord_type = "latitude" + elif hh.is_longitude(engine, var_name): + coord_type = "longitude" + elif hh.is_time(engine, var_name): + coord_type = "time" + elif hh.is_time_period(engine, var_name): + coord_type = "time_period" + elif hh.is_projection_x_coordinate(engine, var_name): + coord_type = "projection_x" + elif hh.is_projection_y_coordinate(engine, var_name): + coord_type = "projection_y" + + if coord_type is None: + # Not identified as a specific known coord_type. + # N.B. in the original rules, this does *not* trigger separate + # 'provides' and 'build' phases : there is just a single + # 'fc_default_coordinate' rule. 
+ # Rationalise this for now by making it more like the others. + # FOR NOW: ~matching old code, but they could *all* be simplified. + # TODO: combine 2 operation into 1 for ALL of these. + coord_type = "miscellaneous" + rule_name = "fc_default_coordinate_(provide-phase)" + else: + rule_name = f"fc_provides_coordinate_({coord_type})" + + engine.add_fact("provides-coordinate-(oftype)", (coord_type, var_name)) + return rule_name + + +# Lookup table used by 'action_build_dimension_coordinate'. +# Maps each supported coordinate-type name (a rules-internal concept) to a pair +# of information values : +# (@0) A grid "type", one of latlon/rotated/projected (or None) +# If set, the cube should have a coord-system, which is set on the +# resulting coordinate. If None, the coord has no coord_system. +# (@1) an (optional) fixed standard-name for the coordinate, or None +# If None, the coordinate name is copied from the source variable +_COORDTYPE_GRIDTYPES_AND_COORDNAMES = { + "latitude": ("latlon", hh.CF_VALUE_STD_NAME_LAT), + "longitude": ("latlon", hh.CF_VALUE_STD_NAME_LON), + "rotated_latitude": ( + "rotated", + hh.CF_VALUE_STD_NAME_GRID_LAT, + ), + "rotated_longitude": ( + "rotated", + hh.CF_VALUE_STD_NAME_GRID_LON, + ), + "projection_x": ("projected", hh.CF_VALUE_STD_NAME_PROJ_X), + "projection_y": ("projected", hh.CF_VALUE_STD_NAME_PROJ_Y), + "time": (None, None), + "time_period": (None, None), + "miscellaneous": (None, None), +} + + +@action_function +def action_build_dimension_coordinate(engine, providescoord_fact): + """Convert a CFCoordinateVariable into a cube dim-coord.""" + coord_type, var_name = providescoord_fact + cf_var = engine.cf_var.cf_group[var_name] + rule_name = f"fc_build_coordinate_({coord_type})" + coord_grid_class, coord_name = _COORDTYPE_GRIDTYPES_AND_COORDNAMES[ + coord_type + ] + if coord_grid_class is None: + # Coordinates not identified with a specific grid-type class (latlon, + # rotated or projected) are always built, but can have no coord-system. + coord_system = None # no coord-system can be used + succeed = True + else: + grid_classes = ("latlon", "rotated", "projected") + assert coord_grid_class in grid_classes + # If a coord is of a type identified with a grid, we may have a + # coordinate system (i.e. a valid grid-mapping). + # N.B. this requires each grid-type identification to validate the + # coord var (e.g. "is_longitude"). + # Non-conforming lon/lat/projection coords will be classed as + # dim-coords by cf.py, but 'action_provides_coordinate' will give them + # a coord-type of 'miscellaneous' : hence, they have no coord-system. + coord_system = engine.cube_parts.get("coordinate_system") + # Translate the specific grid-mapping type to a grid-class + if coord_system is None: + succeed = True + cs_gridclass = None + else: + # Get a grid-class from the grid-type + # i.e. one of latlon/rotated/projected, as for coord_grid_class. 
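+            # N.B. the "grid-type" fact was recorded earlier by
+            # 'action_provides_grid_mapping', and holds the (lower-cased)
+            # grid_mapping_name of the grid-mapping variable,
+            # e.g. ("rotated_latitude_longitude",).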
+ gridtypes_factlist = engine.fact_list("grid-type") + (gridtypes_fact,) = gridtypes_factlist # only 1 fact + (cs_gridtype,) = gridtypes_fact # fact contains 1 term + if cs_gridtype == "latitude_longitude": + cs_gridclass = "latlon" + elif cs_gridtype == "rotated_latitude_longitude": + cs_gridclass = "rotated" + else: + # Other specific projections + assert cs_gridtype is not None + cs_gridclass = "projected" + + assert cs_gridclass in grid_classes + (None,) + + if coord_grid_class == "latlon": + if cs_gridclass == "latlon": + succeed = True + elif cs_gridclass is None: + succeed = True + rule_name += "(no-cs)" + elif cs_gridclass == "rotated": + # We disallow this case + succeed = False + rule_name += "(FAILED : latlon coord with rotated cs)" + else: + assert cs_gridclass == "projected" + # succeed, no error, but discards the coord-system + # TODO: could issue a warning in this case ? + succeed = True + coord_system = None + rule_name += "(no-cs : discarded projected cs)" + elif coord_grid_class == "rotated": + if cs_gridclass == "rotated": + succeed = True + rule_name += "(rotated)" + elif cs_gridclass is None: + succeed = True + rule_name += "(rotated no-cs)" + elif cs_gridclass == "latlon": + # We disallow this case + succeed = False + rule_name += "(FAILED rotated coord with latlon cs)" + else: + assert cs_gridclass == "projected" + succeed = True + coord_system = None + rule_name += "(rotated no-cs : discarded projected cs)" + elif coord_grid_class == "projected": + # In this case, can *only* build a coord at all if there is a + # coord-system of the correct class (i.e. 'projected'). + succeed = cs_gridclass == "projected" + if not succeed: + rule_name += "(FAILED projected coord with non-projected cs)" + else: + # Just FYI : literally not possible, as we already asserted this. 
+ assert coord_grid_class in grid_classes + + if succeed: + hh.build_dimension_coordinate( + engine, cf_var, coord_name=coord_name, coord_system=coord_system + ) + return rule_name + + +@action_function +def action_build_auxiliary_coordinate(engine, auxcoord_fact): + """Convert a CFAuxiliaryCoordinateVariable into a cube aux-coord.""" + (var_name,) = auxcoord_fact + rule_name = "fc_build_auxiliary_coordinate" + + # Identify any known coord "type" : latitude/longitude/time/time_period + # If latitude/longitude, this sets the standard_name of the built AuxCoord + coord_type = "" # unidentified : can be OK + coord_name = None + if hh.is_time(engine, var_name): + coord_type = "time" + elif hh.is_time_period(engine, var_name): + coord_type = "time_period" + elif hh.is_longitude(engine, var_name): + coord_type = "longitude" + if hh.is_rotated_longitude(engine, var_name): + coord_type += "_rotated" + coord_name = hh.CF_VALUE_STD_NAME_GRID_LON + else: + coord_name = hh.CF_VALUE_STD_NAME_LON + elif hh.is_latitude(engine, var_name): + coord_type = "latitude" + if hh.is_rotated_latitude(engine, var_name): + coord_type += "_rotated" + coord_name = hh.CF_VALUE_STD_NAME_GRID_LAT + else: + coord_name = hh.CF_VALUE_STD_NAME_LAT + + if coord_type: + rule_name += f"_{coord_type}" + + cf_var = engine.cf_var.cf_group.auxiliary_coordinates[var_name] + hh.build_auxiliary_coordinate(engine, cf_var, coord_name=coord_name) + + return rule_name + + +@action_function +def action_ukmo_stash(engine): + """Convert 'ukmo stash' cf property into a cube attribute.""" + rule_name = "fc_attribute_ukmo__um_stash_source" + var = engine.cf_var + attr_name = "ukmo__um_stash_source" + attr_value = getattr(var, attr_name, None) + if attr_value is None: + attr_altname = "um_stash_source" # legacy form + attr_value = getattr(var, attr_altname, None) + if attr_value is None: + rule_name += "(NOT-TRIGGERED)" + else: + # No helper routine : just do it + engine.cube.attributes["STASH"] = pp.STASH.from_msi(attr_value) + + return rule_name + + +@action_function +def action_ukmo_processflags(engine): + """Convert 'ukmo process flags' cf property into a cube attribute.""" + rule_name = "fc_attribute_ukmo__process_flags" + var = engine.cf_var + attr_name = "ukmo__process_flags" + attr_value = getattr(var, attr_name, None) + if attr_value is None: + rule_name += "(NOT-TRIGGERED)" + else: + # No helper routine : just do it + flags = [x.replace("_", " ") for x in attr_value.split(" ")] + engine.cube.attributes["ukmo__process_flags"] = tuple(flags) + + return rule_name + + +@action_function +def action_build_cell_measure(engine, cellm_fact): + """Convert a CFCellMeasureVariable into a cube cell-measure.""" + (var_name,) = cellm_fact + var = engine.cf_var.cf_group.cell_measures[var_name] + hh.build_cell_measures(engine, var) + + +@action_function +def action_build_ancil_var(engine, ancil_fact): + """Convert a CFAncillaryVariable into a cube ancil-var.""" + (var_name,) = ancil_fact + var = engine.cf_var.cf_group.ancillary_variables[var_name] + hh.build_ancil_var(engine, var) + + +@action_function +def action_build_label_coordinate(engine, label_fact): + """Convert a CFLabelVariable into a cube string-type aux-coord.""" + (var_name,) = label_fact + var = engine.cf_var.cf_group.labels[var_name] + hh.build_auxiliary_coordinate(engine, var) + + +@action_function +def action_formula_type(engine, formula_root_fact): + """Register a CFVariable as a formula root.""" + rule_name = "fc_formula_type" + (var_name,) = formula_root_fact + cf_var = 
engine.cf_var.cf_group[var_name] + # cf_var.standard_name is a formula type (or we should never get here). + formula_type = getattr(cf_var, "standard_name", None) + succeed = True + if formula_type not in iris.fileformats.cf.reference_terms: + succeed = False + rule_name += f"(FAILED - unrecognised formula type = {formula_type!r})" + msg = f"Ignored formula of unrecognised type: {formula_type!r}." + warnings.warn(msg) + if succeed: + # Check we don't already have one. + existing_type = engine.requires.get("formula_type") + if existing_type: + # NOTE: in this case, for now, we will accept the last appearing, + # which matches the older behaviour. + # TODO: this needs resolving, somehow. + succeed = False + msg = ( + "Omitting factories for some hybrid coordinates, as multiple " + "hybrid coordinates on a single variable are not supported: " + f"Formula of type ={formula_type!r} " + f"overrides another of type ={existing_type!r}.)" + ) + warnings.warn(msg) + rule_name += f"_{formula_type}" + # Set 'requires' info for iris.fileformats.netcdf._load_aux_factory. + engine.requires["formula_type"] = formula_type + + return rule_name + + +@action_function +def action_formula_term(engine, formula_term_fact): + """Register a CFVariable as a formula term.""" + # Must run AFTER formula root identification. + (termvar_name, rootvar_name, term_name) = formula_term_fact + # The rootname is implicit : have only one per cube + # TODO: change when we adopt cf-1.7 advanced grid-mapping syntax + engine.requires.setdefault("formula_terms", {})[term_name] = termvar_name + rule_name = f"fc_formula_term({term_name})" + return rule_name + + +def run_actions(engine): + """ + Run all actions for a cube. + + This is the top-level "activation" function which runs all the appropriate + rules actions to translate facts and build all the cube elements. + + The specific cube being translated is "engine.cube". + + """ + + # default (all cubes) action, always runs + action_default(engine) # This should run the default rules. + + # deal with grid-mappings + grid_mapping_facts = engine.fact_list("grid_mapping") + # For now, there should be at most *one* of these. + assert len(grid_mapping_facts) in (0, 1) + for grid_mapping_fact in grid_mapping_facts: + action_provides_grid_mapping(engine, grid_mapping_fact) + + # identify + record aka "PROVIDE" specific named coordinates + # N.B. cf.py has identified that these are dim-coords, NOT aux-coords + # (which are recorded separately). + # TODO: can probably remove this step ?? + dimcoord_facts = engine.fact_list("coordinate") + for dimcoord_fact in dimcoord_facts: + action_provides_coordinate(engine, dimcoord_fact) + + # build (dimension) coordinates + # The 'provides' step and the grid-mapping must have already been done. + providescoord_facts = engine.fact_list("provides-coordinate-(oftype)") + for providescoord_fact in providescoord_facts: + action_build_dimension_coordinate(engine, providescoord_fact) + + # build aux-coords + auxcoord_facts = engine.fact_list("auxiliary_coordinate") + for auxcoord_fact in auxcoord_facts: + action_build_auxiliary_coordinate(engine, auxcoord_fact) + + # Detect + process and special 'ukmo' attributes + # Run on every cube : they choose themselves whether to trigger. 
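+    # (each records a rule name ending "(NOT-TRIGGERED)", and leaves the cube
+    # unchanged, when its attribute is absent).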
+ action_ukmo_stash(engine) + action_ukmo_processflags(engine) + + # cell measures + cellm_facts = engine.fact_list("cell_measure") + for cellm_fact in cellm_facts: + action_build_cell_measure(engine, cellm_fact) + + # ancillary variables + ancil_facts = engine.fact_list("ancillary_variable") + for ancil_fact in ancil_facts: + action_build_ancil_var(engine, ancil_fact) + + # label coords + label_facts = engine.fact_list("label") + for label_fact in label_facts: + action_build_label_coordinate(engine, label_fact) + + # formula root variables + formula_root_facts = engine.fact_list("formula_root") + for root_fact in formula_root_facts: + action_formula_type(engine, root_fact) + + # formula terms + # The 'formula_root's must have already been done. + formula_term_facts = engine.fact_list("formula_term") + for term_fact in formula_term_facts: + action_formula_term(engine, term_fact) diff --git a/lib/iris/fileformats/_nc_load_rules/engine.py b/lib/iris/fileformats/_nc_load_rules/engine.py new file mode 100644 index 0000000000..497c2a12c9 --- /dev/null +++ b/lib/iris/fileformats/_nc_load_rules/engine.py @@ -0,0 +1,150 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +A simple mimic of the Pyke 'knowledge_engine', for interfacing to the routines +in 'iris.fileformats.netcdf' with minimal changes to that code. + +This allows us to replace the Pyke rules operation with the simpler pure-Python +translation operations in :mod:`iris.fileformats._nc_load_rules.actions`. + +The core of this is the 'Engine' class, which mimics the Pyke engine operations, +as used by our code to translate each data cube. + +engine.get_kb() also returns a FactEntity object, which mimics *just enough* +API of a Pyke.knowlege_base, so that we can list its case-specific facts, as +used in :meth:`iris.fileformats.netcdf._actions_activation_stats`. + +""" +from .actions import run_actions + + +class FactEntity: + """ + An object with an 'entity_lists' property which is a dict of 'FactList's. + + A Factlist, in turn, is an object with property 'case_specific_facts', + which is a list of tuples of strings + (each of which is a 'fact' of the named class). + + To support the debug code : + kb_facts = engine.get_kb(_PYKE_FACT_BASE) + for key in kb_facts.entity_lists.keys(): + for arg in kb_facts.entity_lists[key].case_specific_facts: + print("\t%s%s" % (key, arg)) + + """ + + def __init__(self): + self.entity_lists = {} + + class _FactList: + # Just "an object with a 'case_specific_facts' property" (which is a list). + def __init__(self): + self.case_specific_facts = [] + + def add_fact(self, fact_name, args): + # Add a fact "fact_name(*args)". + if fact_name not in self.entity_lists: + self.entity_lists[fact_name] = self._FactList() + fact_list = self.entity_lists[fact_name] + fact_list.case_specific_facts.append(tuple(args)) + + def sect_facts(self, fact_name): + # Lookup all facts "fact_name(*args)" for a given fact_name. + if fact_name in self.entity_lists: + facts = self.entity_lists.get(fact_name).case_specific_facts + else: + facts = [] + return facts + + +class Engine: + """ + A minimal mimic of a Pyke.engine. + + Provides just enough API so that the existing code in + :mod:`iris.fileformats.netcdf` can interface with our new rules functions. + + A list of possible fact-arglists is stored, for each of a set of fact-names + (which are strings). 
+ Each fact-argslist is represented by a tuple of values + -- at present, in practice, those are all strings too. + + """ + + def __init__(self): + """Init new engine.""" + self.reset() + + def reset(self): + """Reset the engine = remove all facts.""" + self.facts = FactEntity() + + def activate(self): + """ + Run all the translation rules to produce a single output cube. + + This implicitly references the output variable for this operation, + set by engine.cf_var (a CFDataVariable). + + The rules operation itself is coded elsewhere, + in :mod:`iris.fileformats.netcdf._nc_load_rules.actions`. + + """ + run_actions(self) + + def get_kb(self): + """ + Get a FactEntity, which mimic (bits of) a knowledge-base. + + Just allowing + :meth:`iris.fileformats.netcdf._action_activation_stats` to list the + facts. + + """ + return self.facts + + def print_stats(self): + """ + No-op, called by + :meth:`iris.fileformats.netcdf._action_activation_stats`. + + """ + pass + + def add_case_specific_fact(self, fact_name, fact_arglist): + """ + Record a fact about the current output operation. + + Roughly, + facts = self.facts.entity_lists[fact_name].case_specific_facts + facts.append(fact_arglist) + + """ + self.facts.add_fact(fact_name, fact_arglist) + + def fact_list(self, fact_name): + """ + Return the facts (arg-lists) for one fact name. + + A shorthand form used only by the new 'actions' routines. + + AKA 'case-specific-facts', in the original. + Roughly = "self.facts.entity_lists[fact_name].case_specific_facts". + + """ + return self.facts.sect_facts(fact_name) + + def add_fact(self, fact_name, fact_arglist): + """ + Add a new fact. + + A shorthand form used only by the new 'actions' routines. + + """ + self.add_case_specific_fact( + fact_name=fact_name, fact_arglist=fact_arglist + ) diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py new file mode 100644 index 0000000000..a5b507d583 --- /dev/null +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -0,0 +1,1309 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +All the pure-Python 'helper' functions which were previously included in the +Pyke rules database 'fc_rules_cf.krb'. + +The 'action' routines now call these, as the rules used to do. +They have not changed, **except** that the 'build_coordinate_system' routine +acquired an extra initial 'engine' argument, purely for consistency with other +build routines, and which it does not use. 
+ +""" + +import warnings + +import cf_units +import numpy as np +import numpy.ma as ma + +import iris.aux_factory +from iris.common.mixin import _get_valid_standard_name +import iris.coord_systems +import iris.coords +import iris.exceptions +import iris.fileformats.cf as cf +import iris.fileformats.netcdf +from iris.fileformats.netcdf import ( + UnknownCellMethodWarning, + _get_cf_var_data, + parse_cell_methods, +) +import iris.std_names +import iris.util + +# +# UD Units Constants (based on Unidata udunits.dat definition file) +# +UD_UNITS_LAT = [ + "degrees_north", + "degree_north", + "degree_n", + "degrees_n", + "degreen", + "degreesn", + "degrees", + "degrees north", + "degree north", + "degree n", + "degrees n", +] +UD_UNITS_LON = [ + "degrees_east", + "degree_east", + "degree_e", + "degrees_e", + "degreee", + "degreese", + "degrees", + "degrees east", + "degree east", + "degree e", + "degrees e", +] +UNKNOWN_UNIT_STRING = "?" +NO_UNIT_STRING = "-" + +# +# CF Dimensionless Vertical Coordinates +# +CF_COORD_VERTICAL = { + "atmosphere_ln_pressure_coordinate": ["p0", "lev"], + "atmosphere_sigma_coordinate": ["sigma", "ps", "ptop"], + "atmosphere_hybrid_sigma_pressure_coordinate": ["a", "b", "ps", "p0"], + "atmosphere_hybrid_height_coordinate": ["a", "b", "orog"], + "atmosphere_sleve_coordinate": [ + "a", + "b1", + "b2", + "ztop", + "zsurf1", + "zsurf2", + ], + "ocean_sigma_coordinate": ["sigma", "eta", "depth"], + "ocean_s_coordinate": ["s", "eta", "depth", "a", "b", "depth_c"], + "ocean_sigma_z_coordinate": [ + "sigma", + "eta", + "depth", + "depth_c", + "nsigma", + "zlev", + ], + "ocean_double_sigma_coordinate": [ + "sigma", + "depth", + "z1", + "z2", + "a", + "href", + "k_c", + ], + "ocean_s_coordinate_g1": ["s", "eta", "depth", "depth_c", "C"], + "ocean_s_coordinate_g2": ["s", "eta", "depth", "depth_c", "C"], +} + +# +# CF Grid Mappings +# +CF_GRID_MAPPING_ALBERS = "albers_conical_equal_area" +CF_GRID_MAPPING_AZIMUTHAL = "azimuthal_equidistant" +CF_GRID_MAPPING_LAMBERT_AZIMUTHAL = "lambert_azimuthal_equal_area" +CF_GRID_MAPPING_LAMBERT_CONFORMAL = "lambert_conformal_conic" +CF_GRID_MAPPING_LAMBERT_CYLINDRICAL = "lambert_cylindrical_equal_area" +CF_GRID_MAPPING_LAT_LON = "latitude_longitude" +CF_GRID_MAPPING_MERCATOR = "mercator" +CF_GRID_MAPPING_ORTHO = "orthographic" +CF_GRID_MAPPING_POLAR = "polar_stereographic" +CF_GRID_MAPPING_ROTATED_LAT_LON = "rotated_latitude_longitude" +CF_GRID_MAPPING_STEREO = "stereographic" +CF_GRID_MAPPING_TRANSVERSE = "transverse_mercator" +CF_GRID_MAPPING_VERTICAL = "vertical_perspective" +CF_GRID_MAPPING_GEOSTATIONARY = "geostationary" + +# +# CF Attribute Names. 
+# +CF_ATTR_AXIS = "axis" +CF_ATTR_BOUNDS = "bounds" +CF_ATTR_CALENDAR = "calendar" +CF_ATTR_CLIMATOLOGY = "climatology" +CF_ATTR_GRID_INVERSE_FLATTENING = "inverse_flattening" +CF_ATTR_GRID_EARTH_RADIUS = "earth_radius" +CF_ATTR_GRID_MAPPING_NAME = "grid_mapping_name" +CF_ATTR_GRID_NORTH_POLE_LAT = "grid_north_pole_latitude" +CF_ATTR_GRID_NORTH_POLE_LON = "grid_north_pole_longitude" +CF_ATTR_GRID_NORTH_POLE_GRID_LON = "north_pole_grid_longitude" +CF_ATTR_GRID_SEMI_MAJOR_AXIS = "semi_major_axis" +CF_ATTR_GRID_SEMI_MINOR_AXIS = "semi_minor_axis" +CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN = "latitude_of_projection_origin" +CF_ATTR_GRID_LON_OF_PROJ_ORIGIN = "longitude_of_projection_origin" +CF_ATTR_GRID_STANDARD_PARALLEL = "standard_parallel" +CF_ATTR_GRID_FALSE_EASTING = "false_easting" +CF_ATTR_GRID_FALSE_NORTHING = "false_northing" +CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN = "scale_factor_at_projection_origin" +CF_ATTR_GRID_SCALE_FACTOR_AT_CENT_MERIDIAN = "scale_factor_at_central_meridian" +CF_ATTR_GRID_LON_OF_CENT_MERIDIAN = "longitude_of_central_meridian" +CF_ATTR_GRID_STANDARD_PARALLEL = "standard_parallel" +CF_ATTR_GRID_PERSPECTIVE_HEIGHT = "perspective_point_height" +CF_ATTR_GRID_SWEEP_ANGLE_AXIS = "sweep_angle_axis" +CF_ATTR_POSITIVE = "positive" +CF_ATTR_STD_NAME = "standard_name" +CF_ATTR_LONG_NAME = "long_name" +CF_ATTR_UNITS = "units" +CF_ATTR_CELL_METHODS = "cell_methods" + +# +# CF Attribute Value Constants. +# +# Attribute - axis. +CF_VALUE_AXIS_X = "x" +CF_VALUE_AXIS_Y = "y" +CF_VALUE_AXIS_T = "t" +CF_VALUE_AXIS_Z = "z" + + +# Attribute - positive. +CF_VALUE_POSITIVE = ["down", "up"] + +# Attribute - standard_name. +CF_VALUE_STD_NAME_LAT = "latitude" +CF_VALUE_STD_NAME_LON = "longitude" +CF_VALUE_STD_NAME_GRID_LAT = "grid_latitude" +CF_VALUE_STD_NAME_GRID_LON = "grid_longitude" +CF_VALUE_STD_NAME_PROJ_X = "projection_x_coordinate" +CF_VALUE_STD_NAME_PROJ_Y = "projection_y_coordinate" + + +################################################################################ +def build_cube_metadata(engine): + """Add the standard meta data to the cube.""" + + cf_var = engine.cf_var + cube = engine.cube + + # Determine the cube's name attributes + cube.var_name = cf_var.cf_name + standard_name = getattr(cf_var, CF_ATTR_STD_NAME, None) + long_name = getattr(cf_var, CF_ATTR_LONG_NAME, None) + cube.long_name = long_name + + if standard_name is not None: + try: + cube.standard_name = _get_valid_standard_name(standard_name) + except ValueError: + if cube.long_name is not None: + cube.attributes["invalid_standard_name"] = standard_name + else: + cube.long_name = standard_name + + # Determine the cube units. + attr_units = get_attr_units(cf_var, cube.attributes) + cube.units = attr_units + + # Incorporate cell methods + nc_att_cell_methods = getattr(cf_var, CF_ATTR_CELL_METHODS, None) + with warnings.catch_warnings(record=True) as warning_records: + cube.cell_methods = parse_cell_methods(nc_att_cell_methods) + # Filter to get the warning we are interested in. + warning_records = [ + record + for record in warning_records + if issubclass(record.category, UnknownCellMethodWarning) + ] + if len(warning_records) > 0: + # Output an enhanced warning message. + warn_record = warning_records[0] + name = "{}".format(cf_var.cf_name) + msg = warn_record.message.args[0] + msg = msg.replace("variable", "variable {!r}".format(name)) + warnings.warn(message=msg, category=UnknownCellMethodWarning) + + # Set the cube global attributes. 
+ for attr_name, attr_value in cf_var.cf_group.global_attributes.items(): + try: + cube.attributes[str(attr_name)] = attr_value + except ValueError as e: + msg = "Skipping global attribute {!r}: {}" + warnings.warn(msg.format(attr_name, str(e))) + + +################################################################################ +def _get_ellipsoid(cf_grid_var): + """Return the ellipsoid definition.""" + major = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MAJOR_AXIS, None) + minor = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MINOR_AXIS, None) + inverse_flattening = getattr( + cf_grid_var, CF_ATTR_GRID_INVERSE_FLATTENING, None + ) + + # Avoid over-specification exception. + if major is not None and minor is not None: + inverse_flattening = None + + # Check for a default spherical earth. + if major is None and minor is None and inverse_flattening is None: + major = getattr(cf_grid_var, CF_ATTR_GRID_EARTH_RADIUS, None) + + return major, minor, inverse_flattening + + +################################################################################ +def build_coordinate_system(engine, cf_grid_var): + """Create a coordinate system from the CF-netCDF grid mapping variable.""" + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + return iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + +################################################################################ +def build_rotated_coordinate_system(engine, cf_grid_var): + """Create a rotated coordinate system from the CF-netCDF grid mapping variable.""" + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + north_pole_latitude = getattr( + cf_grid_var, CF_ATTR_GRID_NORTH_POLE_LAT, 90.0 + ) + north_pole_longitude = getattr( + cf_grid_var, CF_ATTR_GRID_NORTH_POLE_LON, 0.0 + ) + if north_pole_latitude is None or north_pole_longitude is None: + warnings.warn("Rotated pole position is not fully specified") + + north_pole_grid_lon = getattr( + cf_grid_var, CF_ATTR_GRID_NORTH_POLE_GRID_LON, 0.0 + ) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + rcs = iris.coord_systems.RotatedGeogCS( + north_pole_latitude, + north_pole_longitude, + north_pole_grid_lon, + ellipsoid, + ) + + return rcs + + +################################################################################ +def build_transverse_mercator_coordinate_system(engine, cf_grid_var): + """ + Create a transverse Mercator coordinate system from the CF-netCDF + grid mapping variable. + + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_central_meridian = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_CENT_MERIDIAN, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + scale_factor_at_central_meridian = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_CENT_MERIDIAN, None + ) + + # The following accounts for the inconsistancy in the transverse + # mercator description within the CF spec. 
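+    # I.E. the central-meridian longitude and scale-factor may instead be given
+    # as the '..._of_projection_origin' attributes, so fall back to those when
+    # the '..._central_meridian' forms are absent.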
+ if longitude_of_central_meridian is None: + longitude_of_central_meridian = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None + ) + if scale_factor_at_central_meridian is None: + scale_factor_at_central_meridian = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None + ) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.TransverseMercator( + latitude_of_projection_origin, + longitude_of_central_meridian, + false_easting, + false_northing, + scale_factor_at_central_meridian, + ellipsoid, + ) + + return cs + + +################################################################################ +def build_lambert_conformal_coordinate_system(engine, cf_grid_var): + """ + Create a Lambert conformal conic coordinate system from the CF-netCDF + grid mapping variable. + + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_central_meridian = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_CENT_MERIDIAN, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + standard_parallel = getattr( + cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None + ) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.LambertConformal( + latitude_of_projection_origin, + longitude_of_central_meridian, + false_easting, + false_northing, + standard_parallel, + ellipsoid, + ) + + return cs + + +################################################################################ +def build_stereographic_coordinate_system(engine, cf_grid_var): + """ + Create a stereographic coordinate system from the CF-netCDF + grid mapping variable. + + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + # Iris currently only supports Stereographic projections with a scale + # factor of 1.0. This is checked elsewhere. + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.Stereographic( + latitude_of_projection_origin, + longitude_of_projection_origin, + false_easting, + false_northing, + true_scale_lat=None, + ellipsoid=ellipsoid, + ) + + return cs + + +################################################################################ +def build_mercator_coordinate_system(engine, cf_grid_var): + """ + Create a Mercator coordinate system from the CF-netCDF + grid mapping variable. 
+ + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None + ) + # Iris currently only supports Mercator projections with specific + # values for false_easting, false_northing, + # scale_factor_at_projection_origin and standard_parallel. These are + # checked elsewhere. + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.Mercator( + longitude_of_projection_origin, ellipsoid=ellipsoid + ) + + return cs + + +################################################################################ +def build_lambert_azimuthal_equal_area_coordinate_system(engine, cf_grid_var): + """ + Create a lambert azimuthal equal area coordinate system from the CF-netCDF + grid mapping variable. + + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.LambertAzimuthalEqualArea( + latitude_of_projection_origin, + longitude_of_projection_origin, + false_easting, + false_northing, + ellipsoid, + ) + + return cs + + +################################################################################ +def build_albers_equal_area_coordinate_system(engine, cf_grid_var): + """ + Create a albers conical equal area coordinate system from the CF-netCDF + grid mapping variable. + + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_central_meridian = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_CENT_MERIDIAN, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + standard_parallels = getattr( + cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None + ) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.AlbersEqualArea( + latitude_of_projection_origin, + longitude_of_central_meridian, + false_easting, + false_northing, + standard_parallels, + ellipsoid, + ) + + return cs + + +################################################################################ +def build_vertical_perspective_coordinate_system(engine, cf_grid_var): + """ + Create a vertical perspective coordinate system from the CF-netCDF + grid mapping variable. 
+ + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None + ) + perspective_point_height = getattr( + cf_grid_var, CF_ATTR_GRID_PERSPECTIVE_HEIGHT, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.VerticalPerspective( + latitude_of_projection_origin, + longitude_of_projection_origin, + perspective_point_height, + false_easting, + false_northing, + ellipsoid, + ) + + return cs + + +################################################################################ +def build_geostationary_coordinate_system(engine, cf_grid_var): + """ + Create a geostationary coordinate system from the CF-netCDF + grid mapping variable. + + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None + ) + perspective_point_height = getattr( + cf_grid_var, CF_ATTR_GRID_PERSPECTIVE_HEIGHT, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + sweep_angle_axis = getattr( + cf_grid_var, CF_ATTR_GRID_SWEEP_ANGLE_AXIS, None + ) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.Geostationary( + latitude_of_projection_origin, + longitude_of_projection_origin, + perspective_point_height, + sweep_angle_axis, + false_easting, + false_northing, + ellipsoid, + ) + + return cs + + +################################################################################ +def get_attr_units(cf_var, attributes): + attr_units = getattr(cf_var, CF_ATTR_UNITS, UNKNOWN_UNIT_STRING) + if not attr_units: + attr_units = UNKNOWN_UNIT_STRING + + # Sanitise lat/lon units. + if attr_units in UD_UNITS_LAT or attr_units in UD_UNITS_LON: + attr_units = "degrees" + + # Graceful loading of invalid units. + try: + cf_units.as_unit(attr_units) + except ValueError: + # Using converted unicode message. Can be reverted with Python 3. + msg = "Ignoring netCDF variable {!r} invalid units {!r}".format( + cf_var.cf_name, attr_units + ) + warnings.warn(msg) + attributes["invalid_units"] = attr_units + attr_units = UNKNOWN_UNIT_STRING + + if np.issubdtype(cf_var.dtype, np.str_): + attr_units = NO_UNIT_STRING + + if any( + hasattr(cf_var.cf_data, name) + for name in ("flag_values", "flag_masks", "flag_meanings") + ): + attr_units = cf_units._NO_UNIT_STRING + + # Get any assoicated calendar for a time reference coordinate. 
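+    # E.G. units of "days since 1970-01-01" plus a "360_day" calendar attribute
+    # yield a cf_units.Unit with that calendar attached (example values only).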
+ if cf_units.as_unit(attr_units).is_time_reference(): + attr_calendar = getattr(cf_var, CF_ATTR_CALENDAR, None) + + if attr_calendar: + attr_units = cf_units.Unit(attr_units, calendar=attr_calendar) + + return attr_units + + +################################################################################ +def get_names(cf_coord_var, coord_name, attributes): + """Determine the standard_name, long_name and var_name attributes.""" + + standard_name = getattr(cf_coord_var, CF_ATTR_STD_NAME, None) + long_name = getattr(cf_coord_var, CF_ATTR_LONG_NAME, None) + cf_name = str(cf_coord_var.cf_name) + + if standard_name is not None: + try: + standard_name = _get_valid_standard_name(standard_name) + except ValueError: + if long_name is not None: + attributes["invalid_standard_name"] = standard_name + if coord_name is not None: + standard_name = coord_name + else: + standard_name = None + else: + if coord_name is not None: + attributes["invalid_standard_name"] = standard_name + standard_name = coord_name + else: + standard_name = None + + else: + if coord_name is not None: + standard_name = coord_name + + # Last attempt to set the standard name to something meaningful. + if standard_name is None: + if cf_name in iris.std_names.STD_NAMES: + standard_name = cf_name + + return (standard_name, long_name, cf_name) + + +################################################################################ +def get_cf_bounds_var(cf_coord_var): + """ + Return the CF variable representing the bounds of a coordinate + variable. + + """ + attr_bounds = getattr(cf_coord_var, CF_ATTR_BOUNDS, None) + attr_climatology = getattr(cf_coord_var, CF_ATTR_CLIMATOLOGY, None) + + # Determine bounds, prefering standard bounds over climatology. + # NB. No need to raise a warning if the bounds/climatology + # variable is missing, as that will already have been done by + # iris.fileformats.cf. + cf_bounds_var = None + climatological = False + if attr_bounds is not None: + bounds_vars = cf_coord_var.cf_group.bounds + if attr_bounds in bounds_vars: + cf_bounds_var = bounds_vars[attr_bounds] + elif attr_climatology is not None: + climatology_vars = cf_coord_var.cf_group.climatology + if attr_climatology in climatology_vars: + cf_bounds_var = climatology_vars[attr_climatology] + climatological = True + + if attr_bounds is not None and attr_climatology is not None: + warnings.warn( + "Ignoring climatology in favour of bounds attribute " + "on NetCDF variable {!r}.".format(cf_coord_var.cf_name) + ) + + return cf_bounds_var, climatological + + +################################################################################ +def reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var): + """ + Return a bounds_data array with the vertex dimension as the most + rapidly varying. + + .. note:: + + This function assumes the dimension names of the coordinate + variable match those of the bounds variable in order to determine + which is the vertex dimension. + + + """ + vertex_dim_names = set(cf_bounds_var.dimensions).difference( + cf_coord_var.dimensions + ) + if len(vertex_dim_names) != 1: + msg = ( + "Too many dimension names differ between coordinate " + "variable {!r} and the bounds variable {!r}. " + "Expected 1, got {}." 
+ ) + raise ValueError( + msg.format( + str(cf_coord_var.cf_name), + str(cf_bounds_var.cf_name), + len(vertex_dim_names), + ) + ) + vertex_dim = cf_bounds_var.dimensions.index(*vertex_dim_names) + bounds_data = np.rollaxis( + bounds_data.view(), vertex_dim, len(bounds_data.shape) + ) + return bounds_data + + +################################################################################ +def build_dimension_coordinate( + engine, cf_coord_var, coord_name=None, coord_system=None +): + """Create a dimension coordinate (DimCoord) and add it to the cube.""" + + cf_var = engine.cf_var + cube = engine.cube + attributes = {} + + attr_units = get_attr_units(cf_coord_var, attributes) + points_data = cf_coord_var[:] + # Gracefully fill points masked array. + if ma.is_masked(points_data): + points_data = ma.filled(points_data) + msg = "Gracefully filling {!r} dimension coordinate masked points" + warnings.warn(msg.format(str(cf_coord_var.cf_name))) + + # Get any coordinate bounds. + cf_bounds_var, climatological = get_cf_bounds_var(cf_coord_var) + if cf_bounds_var is not None: + bounds_data = cf_bounds_var[:] + # Gracefully fill bounds masked array. + if ma.is_masked(bounds_data): + bounds_data = ma.filled(bounds_data) + msg = "Gracefully filling {!r} dimension coordinate masked bounds" + warnings.warn(msg.format(str(cf_coord_var.cf_name))) + # Handle transposed bounds where the vertex dimension is not + # the last one. Test based on shape to support different + # dimension names. + if cf_bounds_var.shape[:-1] != cf_coord_var.shape: + bounds_data = reorder_bounds_data( + bounds_data, cf_bounds_var, cf_coord_var + ) + else: + bounds_data = None + + # Determine whether the coordinate is circular. + circular = False + if ( + points_data.ndim == 1 + and coord_name in [CF_VALUE_STD_NAME_LON, CF_VALUE_STD_NAME_GRID_LON] + and cf_units.Unit(attr_units) + in [cf_units.Unit("radians"), cf_units.Unit("degrees")] + ): + modulus_value = cf_units.Unit(attr_units).modulus + circular = iris.util._is_circular( + points_data, modulus_value, bounds=bounds_data + ) + + # Determine the name of the dimension/s shared between the CF-netCDF data variable + # and the coordinate being built. + common_dims = [ + dim for dim in cf_coord_var.dimensions if dim in cf_var.dimensions + ] + data_dims = None + if common_dims: + # Calculate the offset of each common dimension. + data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] + + # Determine the standard_name, long_name and var_name + standard_name, long_name, var_name = get_names( + cf_coord_var, coord_name, attributes + ) + + # Create the coordinate. + try: + coord = iris.coords.DimCoord( + points_data, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=attr_units, + bounds=bounds_data, + attributes=attributes, + coord_system=coord_system, + circular=circular, + climatological=climatological, + ) + except ValueError as e_msg: + # Attempt graceful loading. + coord = iris.coords.AuxCoord( + points_data, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=attr_units, + bounds=bounds_data, + attributes=attributes, + coord_system=coord_system, + climatological=climatological, + ) + cube.add_aux_coord(coord, data_dims) + msg = ( + "Failed to create {name!r} dimension coordinate: {error}\n" + "Gracefully creating {name!r} auxiliary coordinate instead." + ) + warnings.warn(msg.format(name=str(cf_coord_var.cf_name), error=e_msg)) + else: + # Add the dimension coordinate to the cube. 
+ if data_dims: + cube.add_dim_coord(coord, data_dims) + else: + # Scalar coords are placed in the aux_coords container. + cube.add_aux_coord(coord, data_dims) + + # Update the coordinate to CF-netCDF variable mapping. + engine.cube_parts["coordinates"].append((coord, cf_coord_var.cf_name)) + + +################################################################################ +def build_auxiliary_coordinate( + engine, cf_coord_var, coord_name=None, coord_system=None +): + """Create an auxiliary coordinate (AuxCoord) and add it to the cube.""" + + cf_var = engine.cf_var + cube = engine.cube + attributes = {} + + # Get units + attr_units = get_attr_units(cf_coord_var, attributes) + + # Get any coordinate point data. + if isinstance(cf_coord_var, cf.CFLabelVariable): + points_data = cf_coord_var.cf_label_data(cf_var) + else: + points_data = _get_cf_var_data(cf_coord_var, engine.filename) + + # Get any coordinate bounds. + cf_bounds_var, climatological = get_cf_bounds_var(cf_coord_var) + if cf_bounds_var is not None: + bounds_data = _get_cf_var_data(cf_bounds_var, engine.filename) + + # Handle transposed bounds where the vertex dimension is not + # the last one. Test based on shape to support different + # dimension names. + if cf_bounds_var.shape[:-1] != cf_coord_var.shape: + # Resolving the data to a numpy array (i.e. *not* masked) for + # compatibility with array creators (i.e. dask) + bounds_data = np.asarray(bounds_data) + bounds_data = reorder_bounds_data( + bounds_data, cf_bounds_var, cf_coord_var + ) + else: + bounds_data = None + + # Determine the name of the dimension/s shared between the CF-netCDF data variable + # and the coordinate being built. + common_dims = [ + dim for dim in cf_coord_var.dimensions if dim in cf_var.dimensions + ] + data_dims = None + if common_dims: + # Calculate the offset of each common dimension. + data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] + + # Determine the standard_name, long_name and var_name + standard_name, long_name, var_name = get_names( + cf_coord_var, coord_name, attributes + ) + + # Create the coordinate + coord = iris.coords.AuxCoord( + points_data, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=attr_units, + bounds=bounds_data, + attributes=attributes, + coord_system=coord_system, + climatological=climatological, + ) + + # Add it to the cube + cube.add_aux_coord(coord, data_dims) + + # Make a list with names, stored on the engine, so we can find them all later. + engine.cube_parts["coordinates"].append((coord, cf_coord_var.cf_name)) + + +################################################################################ +def build_cell_measures(engine, cf_cm_var): + """Create a CellMeasure instance and add it to the cube.""" + cf_var = engine.cf_var + cube = engine.cube + attributes = {} + + # Get units + attr_units = get_attr_units(cf_cm_var, attributes) + + # Get (lazy) content array + data = _get_cf_var_data(cf_cm_var, engine.filename) + + # Determine the name of the dimension/s shared between the CF-netCDF data variable + # and the coordinate being built. + common_dims = [ + dim for dim in cf_cm_var.dimensions if dim in cf_var.dimensions + ] + data_dims = None + if common_dims: + # Calculate the offset of each common dimension. + data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] + + # Determine the standard_name, long_name and var_name + standard_name, long_name, var_name = get_names(cf_cm_var, None, attributes) + + # Obtain the cf_measure. 
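+    # (per the CF conventions, this is one of "area" or "volume").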
+ measure = cf_cm_var.cf_measure + + # Create the CellMeasure + cell_measure = iris.coords.CellMeasure( + data, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=attr_units, + attributes=attributes, + measure=measure, + ) + + # Add it to the cube + cube.add_cell_measure(cell_measure, data_dims) + + # Make a list with names, stored on the engine, so we can find them all later. + engine.cube_parts["cell_measures"].append( + (cell_measure, cf_cm_var.cf_name) + ) + + +################################################################################ +def build_ancil_var(engine, cf_av_var): + """Create an AncillaryVariable instance and add it to the cube.""" + cf_var = engine.cf_var + cube = engine.cube + attributes = {} + + # Get units + attr_units = get_attr_units(cf_av_var, attributes) + + # Get (lazy) content array + data = _get_cf_var_data(cf_av_var, engine.filename) + + # Determine the name of the dimension/s shared between the CF-netCDF data variable + # and the AV being built. + common_dims = [ + dim for dim in cf_av_var.dimensions if dim in cf_var.dimensions + ] + data_dims = None + if common_dims: + # Calculate the offset of each common dimension. + data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] + + # Determine the standard_name, long_name and var_name + standard_name, long_name, var_name = get_names(cf_av_var, None, attributes) + + # Create the AncillaryVariable + av = iris.coords.AncillaryVariable( + data, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=attr_units, + attributes=attributes, + ) + + # Add it to the cube + cube.add_ancillary_variable(av, data_dims) + + # Make a list with names, stored on the engine, so we can find them all later. + engine.cube_parts["ancillary_variables"].append((av, cf_av_var.cf_name)) + + +################################################################################ +def _is_lat_lon( + cf_var, ud_units, std_name, std_name_grid, axis_name, prefixes +): + """ + Determine whether the CF coordinate variable is a latitude/longitude variable. + + Ref: [CF] Section 4.1 Latitude Coordinate. + [CF] Section 4.2 Longitude Coordinate. + + """ + is_valid = False + attr_units = getattr(cf_var, CF_ATTR_UNITS, None) + + if attr_units is not None: + attr_units = attr_units.lower() + is_valid = attr_units in ud_units + + # Special case - Check for rotated pole. + if attr_units == "degrees": + attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None) + if attr_std_name is not None: + is_valid = attr_std_name.lower() == std_name_grid + else: + is_valid = False + # TODO: check that this interpretation of axis is correct. + attr_axis = getattr(cf_var, CF_ATTR_AXIS, None) + if attr_axis is not None: + is_valid = attr_axis.lower() == axis_name + else: + # Alternative is to check standard_name or axis. 
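+            # i.e. identify the coordinate from a recognised 'standard_name'
+            # (e.g. 'latitude' / 'grid_latitude', or a known prefix such as
+            # 'lat') or, failing that, from its 'axis' attribute (e.g. 'Y').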
+ attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None) + + if attr_std_name is not None: + attr_std_name = attr_std_name.lower() + is_valid = attr_std_name in [std_name, std_name_grid] + if not is_valid: + is_valid = any( + [attr_std_name.startswith(prefix) for prefix in prefixes] + ) + else: + attr_axis = getattr(cf_var, CF_ATTR_AXIS, None) + + if attr_axis is not None: + is_valid = attr_axis.lower() == axis_name + + return is_valid + + +################################################################################ +def is_latitude(engine, cf_name): + """Determine whether the CF coordinate variable is a latitude variable.""" + cf_var = engine.cf_var.cf_group[cf_name] + return _is_lat_lon( + cf_var, + UD_UNITS_LAT, + CF_VALUE_STD_NAME_LAT, + CF_VALUE_STD_NAME_GRID_LAT, + CF_VALUE_AXIS_Y, + ["lat", "rlat"], + ) + + +################################################################################ +def is_longitude(engine, cf_name): + """Determine whether the CF coordinate variable is a longitude variable.""" + cf_var = engine.cf_var.cf_group[cf_name] + return _is_lat_lon( + cf_var, + UD_UNITS_LON, + CF_VALUE_STD_NAME_LON, + CF_VALUE_STD_NAME_GRID_LON, + CF_VALUE_AXIS_X, + ["lon", "rlon"], + ) + + +################################################################################ +def is_projection_x_coordinate(engine, cf_name): + """ + Determine whether the CF coordinate variable is a + projection_x_coordinate variable. + + """ + cf_var = engine.cf_var.cf_group[cf_name] + attr_name = getattr(cf_var, CF_ATTR_STD_NAME, None) or getattr( + cf_var, CF_ATTR_LONG_NAME, None + ) + return attr_name == CF_VALUE_STD_NAME_PROJ_X + + +################################################################################ +def is_projection_y_coordinate(engine, cf_name): + """ + Determine whether the CF coordinate variable is a + projection_y_coordinate variable. + + """ + cf_var = engine.cf_var.cf_group[cf_name] + attr_name = getattr(cf_var, CF_ATTR_STD_NAME, None) or getattr( + cf_var, CF_ATTR_LONG_NAME, None + ) + return attr_name == CF_VALUE_STD_NAME_PROJ_Y + + +################################################################################ +def is_time(engine, cf_name): + """ + Determine whether the CF coordinate variable is a time variable. + + Ref: [CF] Section 4.4 Time Coordinate. 
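+
+    A coordinate is treated as a time coordinate when its units are a time
+    reference (e.g. "hours since 1970-01-01") and either its standard_name
+    is "time" or its "axis" attribute is "T" (case-insensitive).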
+
+    """
+    cf_var = engine.cf_var.cf_group[cf_name]
+    attr_units = getattr(cf_var, CF_ATTR_UNITS, None)
+
+    attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None)
+    attr_axis = getattr(cf_var, CF_ATTR_AXIS, "")
+    try:
+        is_time_reference = cf_units.Unit(attr_units or 1).is_time_reference()
+    except ValueError:
+        is_time_reference = False
+
+    return is_time_reference and (
+        attr_std_name == "time" or attr_axis.lower() == CF_VALUE_AXIS_T
+    )
+
+
+################################################################################
+def is_time_period(engine, cf_name):
+    """Determine whether the CF coordinate variable represents a time period."""
+    is_valid = False
+    cf_var = engine.cf_var.cf_group[cf_name]
+    attr_units = getattr(cf_var, CF_ATTR_UNITS, None)
+
+    if attr_units is not None:
+        try:
+            is_valid = cf_units.is_time(attr_units)
+        except ValueError:
+            is_valid = False
+
+    return is_valid
+
+
+################################################################################
+def is_grid_mapping(engine, cf_name, grid_mapping):
+    """Determine whether the CF grid mapping variable is of the appropriate type."""
+
+    is_valid = False
+    cf_var = engine.cf_var.cf_group[cf_name]
+    attr_mapping_name = getattr(cf_var, CF_ATTR_GRID_MAPPING_NAME, None)
+
+    if attr_mapping_name is not None:
+        is_valid = attr_mapping_name.lower() == grid_mapping
+
+    return is_valid
+
+
+################################################################################
+def _is_rotated(engine, cf_name, cf_attr_value):
+    """Determine whether the CF coordinate variable is rotated."""
+
+    is_valid = False
+    cf_var = engine.cf_var.cf_group[cf_name]
+    attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None)
+
+    if attr_std_name is not None:
+        is_valid = attr_std_name.lower() == cf_attr_value
+    else:
+        attr_units = getattr(cf_var, CF_ATTR_UNITS, None)
+        if attr_units is not None:
+            is_valid = attr_units.lower() == "degrees"
+
+    return is_valid
+
+
+################################################################################
+def is_rotated_latitude(engine, cf_name):
+    """Determine whether the CF coordinate variable is rotated latitude."""
+    return _is_rotated(engine, cf_name, CF_VALUE_STD_NAME_GRID_LAT)
+
+
+################################################################################
+def is_rotated_longitude(engine, cf_name):
+    """Determine whether the CF coordinate variable is rotated longitude."""
+    return _is_rotated(engine, cf_name, CF_VALUE_STD_NAME_GRID_LON)
+
+
+################################################################################
+def has_supported_mercator_parameters(engine, cf_name):
+    """Determine whether the CF grid mapping variable has the supported
+    values for the parameters of the Mercator projection."""
+
+    is_valid = True
+    cf_grid_var = engine.cf_var.cf_group[cf_name]
+
+    false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None)
+    false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None)
+    scale_factor_at_projection_origin = getattr(
+        cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None
+    )
+    standard_parallel = getattr(
+        cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None
+    )
+
+    if false_easting is not None and false_easting != 0:
+        warnings.warn(
+            "False eastings other than 0.0 not yet supported "
+            "for Mercator projections"
+        )
+        is_valid = False
+    if false_northing is not None and false_northing != 0:
+        warnings.warn(
+            "False northings other than 0.0 not yet supported "
+            "for Mercator projections"
+        )
+        is_valid = False
+    if (
+        
scale_factor_at_projection_origin is not None + and scale_factor_at_projection_origin != 1 + ): + warnings.warn( + "Scale factors other than 1.0 not yet supported for " + "Mercator projections" + ) + is_valid = False + if standard_parallel is not None and standard_parallel != 0: + warnings.warn( + "Standard parallels other than 0.0 not yet " + "supported for Mercator projections" + ) + is_valid = False + + return is_valid + + +################################################################################ +def has_supported_stereographic_parameters(engine, cf_name): + """Determine whether the CF grid mapping variable has a value of 1.0 + for the scale_factor_at_projection_origin attribute.""" + + is_valid = True + cf_grid_var = engine.cf_var.cf_group[cf_name] + + scale_factor_at_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None + ) + + if ( + scale_factor_at_projection_origin is not None + and scale_factor_at_projection_origin != 1 + ): + warnings.warn( + "Scale factors other than 1.0 not yet supported for " + "stereographic projections" + ) + is_valid = False + + return is_valid diff --git a/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb b/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb deleted file mode 100644 index d41ec6aa3e..0000000000 --- a/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb +++ /dev/null @@ -1,2355 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# -# Pyke forward chaining rule interface to translate NetCDF Climate Forecast (CF) -# Metadata Conventions data into an Iris cube. -# -# References: -# -# [CF] NetCDF Climate and Forecast (CF) Metadata conventions, Version 1.5, October, 2010. -# - - -# -# Context: -# This rule will always trigger. -# -# Purpose: -# Add standard meta-data to the cube. -# -fc_default - assert - python build_cube_metadata(engine) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a rotated pole. -# -# Purpose: -# Creates the rotated pole lat/lon coordinate system. -# -fc_provides_grid_mapping_rotated_latitude_longitude - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, CF_GRID_MAPPING_ROTATED_LAT_LON) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = build_rotated_coordinate_system(engine, cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, rotated_latitude_longitude) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a regular lat/lon. -# -# Purpose: -# Creates the lat/lon coordinate system. 
-# -fc_provides_grid_mapping_latitude_longitude - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, CF_GRID_MAPPING_LAT_LON) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = build_coordinate_system(cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, latitude_longitude) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a transverse Mercator. -# -# Purpose: -# Creates the transverse Mercator coordinate system. -# -fc_provides_grid_mapping_transverse_mercator - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, CF_GRID_MAPPING_TRANSVERSE) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = build_transverse_mercator_coordinate_system(engine, cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, transverse_mercator) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a Mercator. -# -# Purpose: -# Creates the Mercator coordinate system. -# -fc_provides_grid_mapping_mercator - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, CF_GRID_MAPPING_MERCATOR) - check has_supported_mercator_parameters(engine, $grid_mapping) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = build_mercator_coordinate_system(engine, cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, mercator) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a stereographic. -# -# Purpose: -# Creates the stereographic coordinate system. -# -fc_provides_grid_mapping_stereographic - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, CF_GRID_MAPPING_STEREO) - check has_supported_stereographic_parameters(engine, $grid_mapping) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = build_stereographic_coordinate_system(engine, cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, stereographic) - python engine.rule_triggered.add(rule.name) - - -# -# Context: This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a Lambert conformal. -# -# Purpose: -# Creates the Lambert conformal conic coordinate system. 
-# -fc_provides_grid_mapping_lambert_conformal - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, CF_GRID_MAPPING_LAMBERT_CONFORMAL) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = build_lambert_conformal_coordinate_system(engine, cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, lambert_conformal) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a lambert azimuthal equal area. -# -# Purpose: -# Creates the lambert azimuthal equal area coordinate system. -# -fc_provides_grid_mapping_lambert_azimuthal_equal_area - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, CF_GRID_MAPPING_LAMBERT_AZIMUTHAL) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = build_lambert_azimuthal_equal_area_coordinate_system(engine, cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, lambert_azimuthal_equal_area) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a albers conical equal area. -# -# Purpose: -# Creates the albers conical equal area coordinate system. -# -fc_provides_grid_mapping_albers_equal_area - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, CF_GRID_MAPPING_ALBERS) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = build_albers_equal_area_coordinate_system(engine, cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, albers_equal_area) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a vertical perspective. -# -# Purpose: -# Creates the vertical perspective coordinate system. -# -fc_provides_grid_mapping_vertical_perspective - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, CF_GRID_MAPPING_VERTICAL) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = \ - build_vertical_perspective_coordinate_system(engine, cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, vertical_perspective) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a geostationary. -# -# Purpose: -# Creates the geostationary coordinate system. 
-# -fc_provides_grid_mapping_geostationary - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, - CF_GRID_MAPPING_GEOSTATIONARY) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = \ - build_geostationary_coordinate_system(engine, cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, geostationary) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a coordinate() case specific fact -# has been asserted that refers to a CF latitude coordinate. -# -# Purpose: -# Assert that the CF latitude coordinate exists. -# -fc_provides_coordinate_latitude - foreach - facts_cf.coordinate($coordinate) - check is_latitude(engine, $coordinate) - assert - facts_cf.provides(coordinate, latitude, $coordinate) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a coordinate() case specific fact -# has been asserted that refers to a CF longitude coordinate. -# -# Purpose: -# Assert that the CF longitude coordinate exists. -# -fc_provides_coordinate_longitude - foreach - facts_cf.coordinate($coordinate) - check is_longitude(engine, $coordinate) - assert - facts_cf.provides(coordinate, longitude, $coordinate) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a coordinate() case specific fact -# has been asserted that refers to a CF projection_x_coordinate. -# -# Purpose: -# Assert that the CF projection_x_coordinate exists. -# -fc_provides_projection_x_coordinate - foreach - facts_cf.coordinate($coordinate) - check is_projection_x_coordinate(engine, $coordinate) - assert - facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a coordinate() case specific fact -# has been asserted that refers to a CF projection_y_coordinate. -# -# Purpose: -# Assert that the CF projection_y_coordinate exists. -# -fc_provides_projection_y_coordinate - foreach - facts_cf.coordinate($coordinate) - check is_projection_y_coordinate(engine, $coordinate) - assert - facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a coordinate() case specific fact -# has been asserted that refers to a CF time coordinate. -# -# Purpose: -# Assert that the CF time coordinate exists. -# -fc_provides_coordinate_time - foreach - facts_cf.coordinate($coordinate) - check is_time(engine, $coordinate) - assert - facts_cf.provides(coordinate, time, $coordinate) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a coordinate() case specific fact -# had been asserted that contains units of time, but is not -# a time reference. -# -# Purpose: -# Assert that the forecast period coordinate exists. -# -fc_provides_coordinate_time_period - foreach - facts_cf.coordinate($coordinate) - check is_time_period(engine, $coordinate) - assert - facts_cf.provides(coordinate, time_period, $coordinate) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger for each label() case specific fact. -# -# Purpose: -# Add the label coordinate to the cube. 
-# -fc_build_label_coordinate - foreach - facts_cf.label($coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.labels[$coordinate] - python build_auxiliary_coordinate(engine, cf_coord_var) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff an auxiliary_coordinate() case specific fact -# has been asserted that contains units that are a time reference or an -# axis of time. -# -# Purpose: -# Add the time reference auxiliary coordinate to the cube. -# -fc_build_auxiliary_coordinate_time - foreach - facts_cf.auxiliary_coordinate($coordinate) - check is_time(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.auxiliary_coordinates[$coordinate] - python build_auxiliary_coordinate(engine, cf_coord_var) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff an auxiliary_coordinate() case specific fact -# has been asserted that contains units of time, but is not a time reference. -# -# Purpose: -# Add the time period auxiliary coordinate to the cube. -# -fc_build_auxiliary_coordinate_time_period - foreach - facts_cf.auxiliary_coordinate($coordinate) - check is_time_period(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.auxiliary_coordinates[$coordinate] - python build_auxiliary_coordinate(engine, cf_coord_var) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff an auxiliary_coordinate() case specific fact -# has been asserted that refers to non-rotated pole latitude data. -# -# Purpose: -# Add the latitude auxiliary coordinate to the cube. -# -fc_build_auxiliary_coordinate_latitude - foreach - facts_cf.auxiliary_coordinate($coordinate) - check is_latitude(engine, $coordinate) - check not is_rotated_latitude(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.auxiliary_coordinates[$coordinate] - python build_auxiliary_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_LAT) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff an auxiliary_coordiante() case specific fact -# has been asserted that refers to rotated pole latitude data. -# -# Purpose: -# Add the rotated pole latitude auxiliary coordinate to the cube. -# -fc_build_auxiliary_coordinate_latitude_rotated - foreach - facts_cf.auxiliary_coordinate($coordinate) - check is_latitude(engine, $coordinate) - check is_rotated_latitude(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.auxiliary_coordinates[$coordinate] - python build_auxiliary_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_GRID_LAT) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff an auxiliary_coordinate() case specific fact -# has been asserted that refers to non-rotated pole longitude data. -# -# Purpose: -# Add the longitude auxiliary coordinate to the cube. 
-# -fc_build_auxiliary_coordinate_longitude - foreach - facts_cf.auxiliary_coordinate($coordinate) - check is_longitude(engine, $coordinate) - check not is_rotated_longitude(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.auxiliary_coordinates[$coordinate] - python build_auxiliary_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_LON) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff an auxiliary_coordinate() case specific fact -# has been asserted that refers to rotated pole longitude data. -# -# Purpose: -# Add the rotated pole auxiliary coordinate to the cube. -# -fc_build_auxiliary_coordinate_longitude_rotated - foreach - facts_cf.auxiliary_coordinate($coordinate) - check is_longitude(engine, $coordinate) - check is_rotated_longitude(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.auxiliary_coordinates[$coordinate] - python build_auxiliary_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_GRID_LON) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger for each auxiliary_coordinate() case specific fact -# that is not a spatio-temporal related auxiliary coordinate. -# -# Purpose: -# Add the auxiliary coordinate to the cube. -# -fc_build_auxiliary_coordinate - foreach - facts_cf.auxiliary_coordinate($coordinate) - check not is_time(engine, $coordinate) - check not is_time_period(engine, $coordinate) - check not is_latitude(engine, $coordinate) - check not is_longitude(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.auxiliary_coordinates[$coordinate] - python build_auxiliary_coordinate(engine, cf_coord_var) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger for each cell_measure case specific fact. -# -# Purpose: -# Add the cell measures attribute to the cube. -# -fc_build_cell_measure - foreach - facts_cf.cell_measure($coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.cell_measures[$coordinate] - python build_cell_measures(engine, cf_coord_var) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger for each ancillary_variable case specific fact. -# -# Purpose: -# Add the ancillary variable to the cube. -# -fc_build_ancil_var - foreach - facts_cf.ancillary_variable($var) - assert - python ancil_var = engine.cf_var.cf_group.ancillary_variables[$var] - python build_ancil_var(engine, ancil_var) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a CF latitude coordinate exists and -# a lat/lon coordinate system exists. -# -# Purpose: -# Add the latitude coordinate into the cube. -# -fc_build_coordinate_latitude - foreach - facts_cf.provides(coordinate, latitude, $coordinate) - facts_cf.provides(coordinate_system, latitude_longitude) - check not is_rotated_latitude(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_LAT, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a CF rotated latitude coordinate exists and -# a rotated lat/lon coordinate system exists. -# -# Purpose: -# Add the rotated latitude coordinate into the cube. 
-# -fc_build_coordinate_latitude_rotated - foreach - facts_cf.provides(coordinate, latitude, $coordinate) - facts_cf.provides(coordinate_system, rotated_latitude_longitude) - check is_rotated_latitude(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_GRID_LAT, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a CF longitude coordinate exists and -# a lat/lon coordinate system exists. -# -# Purpose: -# Add the longitude coordinate into the cube. -# -fc_build_coordinate_longitude - foreach - facts_cf.provides(coordinate, longitude, $coordinate) - facts_cf.provides(coordinate_system, latitude_longitude) - check not is_rotated_longitude(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_LON, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a CF rotated longitude coordinate exists and -# a rotated lat/lon coordinate system exists. -# -# Purpose: -# Add the rotated longitude coordinate into the cube. -# -fc_build_coordinate_longitude_rotated - foreach - facts_cf.provides(coordinate, longitude, $coordinate) - facts_cf.provides(coordinate_system, rotated_latitude_longitude) - check is_rotated_longitude(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_GRID_LON, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a CF latitude coordinate exists and -# no coordinate system exists. -# -# Purpose: -# Add the latitude coordinate into the cube. -# -fc_build_coordinate_latitude_nocs - foreach - facts_cf.provides(coordinate, latitude, $coordinate) - notany - facts_cf.provides(coordinate_system, latitude_longitude) - notany - facts_cf.provides(coordinate_system, rotated_latitude_longitude) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_LAT, - coord_system=None) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a CF longitude coordinate exists and -# no lat/lon coordinate system exists. -# -# Purpose: -# Add the longitude coordinate into the cube. -# -fc_build_coordinate_longitude_nocs - foreach - facts_cf.provides(coordinate, longitude, $coordinate) - notany - facts_cf.provides(coordinate_system, latitude_longitude) - notany - facts_cf.provides(coordinate_system, rotated_latitude_longitude) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_LON, - coord_system=None) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a projection_x_coordinate coordinate exists and -# a transverse Mercator coordinate system exists. -# -# Purpose: -# Add the projection_x_coordinate coordinate into the cube. 
-# -fc_build_coordinate_projection_x_transverse_mercator - foreach - facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) - facts_cf.provides(coordinate_system, transverse_mercator) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a projection_y_coordinate coordinate exists and -# a transverse Mercator coordinate system exists. -# -# Purpose: -# Add the projection_y_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_y_transverse_mercator - foreach - facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) - facts_cf.provides(coordinate_system, transverse_mercator) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a projection_x_coordinate coordinate exists and -# a Lambert conformal coordinate system exists. -# -# Purpose: -# Add the projection_x_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_x_lambert_conformal - foreach - facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) - facts_cf.provides(coordinate_system, lambert_conformal) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a projection_y_coordinate coordinate exists and -# a Lambert conformal coordinate system exists. -# -# Purpose: -# Add the projection_y_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_y_lambert_conformal - foreach - facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) - facts_cf.provides(coordinate_system, lambert_conformal) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a projection_x_coordinate coordinate exists and -# a Mercator coordinate system exists. -# -# Purpose: -# Add the projection_x_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_x_mercator - foreach - facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) - facts_cf.provides(coordinate_system, mercator) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a projection_y_coordinate coordinate exists and -# a Mercator coordinate system exists. -# -# Purpose: -# Add the projection_y_coordinate coordinate into the cube. 
-# -fc_build_coordinate_projection_y_mercator - foreach - facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) - facts_cf.provides(coordinate_system, mercator) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a projection_x_coordinate coordinate exists and -# a sterographic coordinate system exists. -# -# Purpose: -# Add the projection_x_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_x_stereographic - foreach - facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) - facts_cf.provides(coordinate_system, stereographic) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a projection_y_coordinate coordinate exists and -# a stereographic coordinate system exists. -# -# Purpose: -# Add the projection_y_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_y_stereographic - foreach - facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) - facts_cf.provides(coordinate_system, stereographic) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a projection_x_coordinate coordinate exists and -# a lambert azimuthal equal area coordinate system exists. -# -# Purpose: -# Add the projection_x_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_x_lambert_azimuthal_equal_area - foreach - facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) - facts_cf.provides(coordinate_system, lambert_azimuthal_equal_area) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a projection_y_coordinate coordinate exists and -# a lambert azimuthal equal area coordinate system exists. -# -# Purpose: -# Add the projection_y_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_y_lambert_azimuthal_equal_area - foreach - facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) - facts_cf.provides(coordinate_system, lambert_azimuthal_equal_area) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a projection_x_coordinate coordinate exists and -# a albers conical equal area coordinate system exists. -# -# Purpose: -# Add the projection_x_coordinate coordinate into the cube. 
-# -fc_build_coordinate_projection_x_albers_equal_area - foreach - facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) - facts_cf.provides(coordinate_system, albers_equal_area) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a projection_y_coordinate coordinate exists and -# a albers conical equal area coordinate system exists. -# -# Purpose: -# Add the projection_y_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_y_albers_equal_area - foreach - facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) - facts_cf.provides(coordinate_system, albers_equal_area) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a projection_x_coordinate coordinate exists and -# a vertical perspective coordinate system exists. -# -# Purpose: -# Add the projection_x_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_x_vertical_perspective - foreach - facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) - facts_cf.provides(coordinate_system, vertical_perspective) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a projection_y_coordinate coordinate exists and -# a vertical perspective coordinate system exists. -# -# Purpose: -# Add the projection_y_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_y_vertical_perspective - foreach - facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) - facts_cf.provides(coordinate_system, vertical_perspective) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a projection_x_coordinate coordinate exists and -# a geostationary coordinate system exists. -# -# Purpose: -# Add the projection_x_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_x_geostationary - foreach - facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) - facts_cf.provides(coordinate_system, geostationary) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a projection_y_coordinate coordinate exists and -# a geostationary coordinate system exists. -# -# Purpose: -# Add the projection_y_coordinate coordinate into the cube. 
-# -fc_build_coordinate_projection_y_geostationary - foreach - facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) - facts_cf.provides(coordinate_system, geostationary) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a CF time coordinate exists. -# -# Purpose: -# Add the time coordinate into the cube. -# -fc_build_coordinate_time - foreach - facts_cf.provides(coordinate, time, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a time period coordinate exists. -# -# Purpose: -# Add the time period coordinate to the cube. -# -fc_build_coordinate_time_period - foreach - facts_cf.provides(coordinate, time_period, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff there exists an "unclassifed" coordinate. -# i.e. a coordinate that is not a latitude, longitude, time or vertical coordinate. -# -# Purpose: -# Add the miscellaneous coordinate into the cube. -# -fc_default_coordinate - foreach - facts_cf.coordinate($coordinate) - notany - facts_cf.provides(coordinate, $_, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var) - facts_cf.provides(coordinate, miscellaneous, $coordinate) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff the "um_stash_source" or "ukmo__um_stash_source" attributes exist -# on the CF-netCDF data variable. -# -# Purpose: -# Add the CF-netCDF data variable "um_stash_source" attribute to the -# cube attributes dictionary as a "STASH" key. -# -fc_attribute_ukmo__um_stash_source - foreach - check hasattr(engine.cf_var, 'ukmo__um_stash_source') or hasattr(engine.cf_var, 'um_stash_source') - assert - python attr_value = getattr(engine.cf_var, 'um_stash_source', None) or getattr(engine.cf_var, 'ukmo__um_stash_source') - python engine.cube.attributes['STASH'] = pp.STASH.from_msi(attr_value) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff the "ukmo__process_flags" attribute exists -# on the CF-netCDF data variable. -# -# Purpose: -# Add the CF-netCDF data variable "ukmo__process_flags" attribute to the -# cube attributes dictionary as a "ukmo__process_flags" key. -# -fc_attribute_ukmo__process_flags - foreach - check hasattr(engine.cf_var, 'ukmo__process_flags') - assert - python attr_value = engine.cf_var.ukmo__process_flags - python engine.cube.attributes['ukmo__process_flags'] = tuple([x.replace("_", " ") for x in attr_value.split(" ")]) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a formula term that refers to a -# dimensionless vertical coordinate of hybrid height. -# -# Purpose: -# Assert that the formula term refers to hybrid height. 
-# -fc_formula_type_atmosphere_hybrid_height_coordinate - foreach - facts_cf.formula_root($coordinate) - check getattr(engine.cf_var.cf_group[$coordinate], 'standard_name') == 'atmosphere_hybrid_height_coordinate' - assert - python engine.requires['formula_type'] = 'atmosphere_hybrid_height_coordinate' - facts_cf.formula_type(atmosphere_hybrid_height_coordinate) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a formula term that refers to a -# dimensionless vertical coordinate of hybrid pressure. -# -# Purpose: -# Assert that the formula term refers to hybrid pressure. -# -fc_formula_type_atmosphere_hybrid_sigma_pressure_coordinate - foreach - facts_cf.formula_root($coordinate) - check getattr(engine.cf_var.cf_group[$coordinate], 'standard_name') == 'atmosphere_hybrid_sigma_pressure_coordinate' - assert - python engine.requires['formula_type'] = 'atmosphere_hybrid_sigma_pressure_coordinate' - facts_cf.formula_type(atmosphere_hybrid_height_coordinate) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a formula term that refers to a -# dimensionless vertical coordinate of ocean sigma over z -# -# Purpose: -# Assert that the formula term refers to ocean sigma over z. -# -fc_formula_type_ocean_sigma_z_coordinate - foreach - facts_cf.formula_root($coordinate) - check getattr(engine.cf_var.cf_group[$coordinate], 'standard_name') == 'ocean_sigma_z_coordinate' - assert - python engine.requires['formula_type'] = 'ocean_sigma_z_coordinate' - facts_cf.formula_type(ocean_sigma_z_coordinate) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger if a formula term that refers to a -# dimensionless vertical coordinate of ocean sigma -# -# Purpose: -# Assert that the formula term refers to ocean sigma -# -fc_formula_type_ocean_sigma_coordinate - foreach - facts_cf.formula_root($coordinate) - check getattr(engine.cf_var.cf_group[$coordinate], 'standard_name') == 'ocean_sigma_coordinate' - assert - python engine.requires['formula_type'] = 'ocean_sigma_coordinate' - facts_cf.formula_type(ocean_sigma_coordinate) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger if a formula term that refers to a -# dimensionless vertical coordinate of ocean s coordinate -# -# Purpose: -# Assert that the formula term refers to ocean sigma -# -fc_formula_type_ocean_s_coordinate - foreach - facts_cf.formula_root($coordinate) - check getattr(engine.cf_var.cf_group[$coordinate], 'standard_name') == 'ocean_s_coordinate' - assert - python engine.requires['formula_type'] = 'ocean_s_coordinate' - facts_cf.formula_type(ocean_s_coordinate) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger if a formula term that refers to a -# dimensionless vertical coordinate of Ocean s-coordinate g1 -# -# Purpose: -# Assert that the formula term refers to Ocean s-coordinate g1 -# -fc_formula_type_ocean_s_coordinate_g1 - foreach - facts_cf.formula_root($coordinate) - check getattr(engine.cf_var.cf_group[$coordinate], 'standard_name') == 'ocean_s_coordinate_g1' - assert - python engine.requires['formula_type'] = 'ocean_s_coordinate_g1' - facts_cf.formula_type(ocean_s_coordinate_g1) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger if a formula term that refers to a -# dimensionless vertical coordinate of Ocean s-coordinate g2 -# -# Purpose: -# Assert that the formula term refers to Ocean s-coordinate g2 -# 
-fc_formula_type_ocean_s_coordinate_g2 - foreach - facts_cf.formula_root($coordinate) - check getattr(engine.cf_var.cf_group[$coordinate], 'standard_name') == 'ocean_s_coordinate_g2' - assert - python engine.requires['formula_type'] = 'ocean_s_coordinate_g2' - facts_cf.formula_type(ocean_s_coordinate_g2) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger for variables referenced by a dimensionless -# vertical coordinate. -# -# Purpose: -# Build a mapping from term name to netCDF variable name. -# -fc_formula_terms - foreach - facts_cf.formula_root($coordinate) - facts_cf.formula_term($var_name, $coordinate, $term) - assert - python engine.requires.setdefault('formula_terms', {})[$term] = $var_name - python engine.rule_triggered.add(rule.name) - - -fc_extras - import warnings - - import cf_units - import netCDF4 - import numpy as np - import numpy.ma as ma - - import iris.aux_factory - from iris.common.mixin import _get_valid_standard_name - import iris.coords - import iris.coord_systems - import iris.fileformats.cf as cf - import iris.fileformats.netcdf - from iris.fileformats.netcdf import _get_cf_var_data, parse_cell_methods, UnknownCellMethodWarning - import iris.fileformats.pp as pp - import iris.exceptions - import iris.std_names - import iris.util - from iris._lazy_data import as_lazy_data - - - # - # UD Units Constants (based on Unidata udunits.dat definition file) - # - UD_UNITS_LAT = ['degrees_north', 'degree_north', 'degree_n', 'degrees_n', - 'degreen', 'degreesn', 'degrees', 'degrees north', - 'degree north', 'degree n', 'degrees n'] - UD_UNITS_LON = ['degrees_east', 'degree_east', 'degree_e', 'degrees_e', - 'degreee', 'degreese', 'degrees', 'degrees east', - 'degree east', 'degree e', 'degrees e'] - UNKNOWN_UNIT_STRING = "?" - NO_UNIT_STRING = "-" - - # - # CF Dimensionless Vertical Coordinates - # - CF_COORD_VERTICAL = {'atmosphere_ln_pressure_coordinate':['p0', 'lev'], - 'atmosphere_sigma_coordinate':['sigma', 'ps', 'ptop'], - 'atmosphere_hybrid_sigma_pressure_coordinate':['a', 'b', 'ps', 'p0'], - 'atmosphere_hybrid_height_coordinate':['a', 'b', 'orog'], - 'atmosphere_sleve_coordinate':['a', 'b1', 'b2', 'ztop', 'zsurf1', 'zsurf2'], - 'ocean_sigma_coordinate':['sigma', 'eta', 'depth'], - 'ocean_s_coordinate':['s', 'eta', 'depth', 'a', 'b', 'depth_c'], - 'ocean_sigma_z_coordinate':['sigma', 'eta', 'depth', 'depth_c', 'nsigma', 'zlev'], - 'ocean_double_sigma_coordinate':['sigma', 'depth', 'z1', 'z2', 'a', 'href', 'k_c'], - 'ocean_s_coordinate_g1':['s', 'eta', 'depth', 'depth_c', 'C'], - 'ocean_s_coordinate_g2':['s', 'eta', 'depth', 'depth_c', 'C']} - - # - # CF Grid Mappings - # - CF_GRID_MAPPING_ALBERS = 'albers_conical_equal_area' - CF_GRID_MAPPING_AZIMUTHAL = 'azimuthal_equidistant' - CF_GRID_MAPPING_LAMBERT_AZIMUTHAL = 'lambert_azimuthal_equal_area' - CF_GRID_MAPPING_LAMBERT_CONFORMAL = 'lambert_conformal_conic' - CF_GRID_MAPPING_LAMBERT_CYLINDRICAL = 'lambert_cylindrical_equal_area' - CF_GRID_MAPPING_LAT_LON = 'latitude_longitude' - CF_GRID_MAPPING_MERCATOR = 'mercator' - CF_GRID_MAPPING_ORTHO = 'orthographic' - CF_GRID_MAPPING_POLAR = 'polar_stereographic' - CF_GRID_MAPPING_ROTATED_LAT_LON = 'rotated_latitude_longitude' - CF_GRID_MAPPING_STEREO = 'stereographic' - CF_GRID_MAPPING_TRANSVERSE = 'transverse_mercator' - CF_GRID_MAPPING_VERTICAL = 'vertical_perspective' - CF_GRID_MAPPING_GEOSTATIONARY = 'geostationary' - - # - # CF Attribute Names. 
- # - CF_ATTR_AXIS = 'axis' - CF_ATTR_BOUNDS = 'bounds' - CF_ATTR_CALENDAR = 'calendar' - CF_ATTR_CLIMATOLOGY = 'climatology' - CF_ATTR_GRID_INVERSE_FLATTENING = 'inverse_flattening' - CF_ATTR_GRID_EARTH_RADIUS = 'earth_radius' - CF_ATTR_GRID_MAPPING_NAME = 'grid_mapping_name' - CF_ATTR_GRID_NORTH_POLE_LAT = 'grid_north_pole_latitude' - CF_ATTR_GRID_NORTH_POLE_LON = 'grid_north_pole_longitude' - CF_ATTR_GRID_NORTH_POLE_GRID_LON = 'north_pole_grid_longitude' - CF_ATTR_GRID_SEMI_MAJOR_AXIS = 'semi_major_axis' - CF_ATTR_GRID_SEMI_MINOR_AXIS = 'semi_minor_axis' - CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN = 'latitude_of_projection_origin' - CF_ATTR_GRID_LON_OF_PROJ_ORIGIN = 'longitude_of_projection_origin' - CF_ATTR_GRID_STANDARD_PARALLEL = 'standard_parallel' - CF_ATTR_GRID_FALSE_EASTING = 'false_easting' - CF_ATTR_GRID_FALSE_NORTHING = 'false_northing' - CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN = 'scale_factor_at_projection_origin' - CF_ATTR_GRID_SCALE_FACTOR_AT_CENT_MERIDIAN = 'scale_factor_at_central_meridian' - CF_ATTR_GRID_LON_OF_CENT_MERIDIAN = 'longitude_of_central_meridian' - CF_ATTR_GRID_STANDARD_PARALLEL = 'standard_parallel' - CF_ATTR_GRID_PERSPECTIVE_HEIGHT = 'perspective_point_height' - CF_ATTR_GRID_SWEEP_ANGLE_AXIS = 'sweep_angle_axis' - CF_ATTR_POSITIVE = 'positive' - CF_ATTR_STD_NAME = 'standard_name' - CF_ATTR_LONG_NAME = 'long_name' - CF_ATTR_UNITS = 'units' - CF_ATTR_CELL_METHODS = 'cell_methods' - - # - # CF Attribute Value Constants. - # - # Attribute - axis. - CF_VALUE_AXIS_X = 'x' - CF_VALUE_AXIS_Y = 'y' - CF_VALUE_AXIS_T = 't' - CF_VALUE_AXIS_Z = 'z' - - - # Attribute - positive. - CF_VALUE_POSITIVE = ['down', 'up'] - - # Attribute - standard_name. - CF_VALUE_STD_NAME_LAT = 'latitude' - CF_VALUE_STD_NAME_LON = 'longitude' - CF_VALUE_STD_NAME_GRID_LAT = 'grid_latitude' - CF_VALUE_STD_NAME_GRID_LON = 'grid_longitude' - CF_VALUE_STD_NAME_PROJ_X = 'projection_x_coordinate' - CF_VALUE_STD_NAME_PROJ_Y = 'projection_y_coordinate' - - - ################################################################################ - def build_cube_metadata(engine): - """Add the standard meta data to the cube.""" - - cf_var = engine.cf_var - cube = engine.cube - - # Determine the cube's name attributes - cube.var_name = cf_var.cf_name - standard_name = getattr(cf_var, CF_ATTR_STD_NAME, None) - long_name = getattr(cf_var, CF_ATTR_LONG_NAME, None) - cube.long_name = long_name - - if standard_name is not None: - try: - cube.standard_name = _get_valid_standard_name(standard_name) - except ValueError: - if cube.long_name is not None: - cube.attributes['invalid_standard_name'] = standard_name - else: - cube.long_name = standard_name - - # Determine the cube units. - attr_units = get_attr_units(cf_var, cube.attributes) - cube.units = attr_units - - # Incorporate cell methods - nc_att_cell_methods = getattr(cf_var, CF_ATTR_CELL_METHODS, None) - with warnings.catch_warnings(record=True) as warning_records: - cube.cell_methods = parse_cell_methods(nc_att_cell_methods) - # Filter to get the warning we are interested in. - warning_records = [record for record in warning_records - if issubclass(record.category, UnknownCellMethodWarning)] - if len(warning_records) > 0: - # Output an enhanced warning message. - warn_record = warning_records[0] - name = '{}'.format(cf_var.cf_name) - msg = warn_record.message.args[0] - msg = msg.replace('variable', 'variable {!r}'.format(name)) - warnings.warn(message=msg, category=UnknownCellMethodWarning) - - # Set the cube global attributes. 
- for attr_name, attr_value in cf_var.cf_group.global_attributes.items(): - try: - cube.attributes[str(attr_name)] = attr_value - except ValueError as e: - msg = 'Skipping global attribute {!r}: {}' - warnings.warn(msg.format(attr_name, str(e))) - - - - ################################################################################ - def _get_ellipsoid(cf_grid_var): - """Return the ellipsoid definition.""" - major = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MAJOR_AXIS, None) - minor = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MINOR_AXIS, None) - inverse_flattening = getattr(cf_grid_var, CF_ATTR_GRID_INVERSE_FLATTENING, None) - - # Avoid over-specification exception. - if major is not None and minor is not None: - inverse_flattening = None - - # Check for a default spherical earth. - if major is None and minor is None and inverse_flattening is None: - major = getattr(cf_grid_var, CF_ATTR_GRID_EARTH_RADIUS, None) - - return major, minor, inverse_flattening - - - ################################################################################ - def build_coordinate_system(cf_grid_var): - """Create a coordinate system from the CF-netCDF grid mapping variable.""" - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - return iris.coord_systems.GeogCS(major, minor, inverse_flattening) - - - ################################################################################ - def build_rotated_coordinate_system(engine, cf_grid_var): - """Create a rotated coordinate system from the CF-netCDF grid mapping variable.""" - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - north_pole_latitude = getattr(cf_grid_var, CF_ATTR_GRID_NORTH_POLE_LAT, 90.0) - north_pole_longitude = getattr(cf_grid_var, CF_ATTR_GRID_NORTH_POLE_LON, 0.0) - if north_pole_latitude is None or north_pole_longitude is None: - warnings.warn('Rotated pole position is not fully specified') - - north_pole_grid_lon = getattr(cf_grid_var, CF_ATTR_GRID_NORTH_POLE_GRID_LON, 0.0) - - ellipsoid = None - if major is not None or minor is not None or inverse_flattening is not None: - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) - - rcs = iris.coord_systems.RotatedGeogCS(north_pole_latitude, north_pole_longitude, - north_pole_grid_lon, ellipsoid) - - return rcs - - - ################################################################################ - def build_transverse_mercator_coordinate_system(engine, cf_grid_var): - """ - Create a transverse Mercator coordinate system from the CF-netCDF - grid mapping variable. - - """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - latitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None) - longitude_of_central_meridian = getattr( - cf_grid_var, CF_ATTR_GRID_LON_OF_CENT_MERIDIAN, None) - false_easting = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - scale_factor_at_central_meridian = getattr( - cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_CENT_MERIDIAN, None) - - # The following accounts for the inconsistancy in the transverse - # mercator description within the CF spec. 
- if longitude_of_central_meridian is None: - longitude_of_central_meridian = getattr( - cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None) - if scale_factor_at_central_meridian is None: - scale_factor_at_central_meridian = getattr( - cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None) - - ellipsoid = None - if major is not None or minor is not None or \ - inverse_flattening is not None: - ellipsoid = iris.coord_systems.GeogCS(major, minor, - inverse_flattening) - - cs = iris.coord_systems.TransverseMercator( - latitude_of_projection_origin, longitude_of_central_meridian, - false_easting, false_northing, scale_factor_at_central_meridian, - ellipsoid) - - return cs - - ################################################################################ - def build_lambert_conformal_coordinate_system(engine, cf_grid_var): - """ - Create a Lambert conformal conic coordinate system from the CF-netCDF - grid mapping variable. - - """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - latitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None) - longitude_of_central_meridian = getattr( - cf_grid_var, CF_ATTR_GRID_LON_OF_CENT_MERIDIAN, None) - false_easting = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - standard_parallel = getattr( - cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None) - - ellipsoid = None - if major is not None or minor is not None or \ - inverse_flattening is not None: - ellipsoid = iris.coord_systems.GeogCS(major, minor, - inverse_flattening) - - cs = iris.coord_systems.LambertConformal( - latitude_of_projection_origin, longitude_of_central_meridian, - false_easting, false_northing, standard_parallel, - ellipsoid) - - return cs - - ################################################################################ - def build_stereographic_coordinate_system(engine, cf_grid_var): - """ - Create a stereographic coordinate system from the CF-netCDF - grid mapping variable. - - """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - latitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None) - longitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None) - false_easting = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - # Iris currently only supports Stereographic projections with a scale - # factor of 1.0. This is checked elsewhere. - - ellipsoid = None - if major is not None or minor is not None or \ - inverse_flattening is not None: - ellipsoid = iris.coord_systems.GeogCS(major, minor, - inverse_flattening) - - cs = iris.coord_systems.Stereographic( - latitude_of_projection_origin, longitude_of_projection_origin, - false_easting, false_northing, - true_scale_lat=None, - ellipsoid=ellipsoid) - - return cs - - ################################################################################ - def build_mercator_coordinate_system(engine, cf_grid_var): - """ - Create a Mercator coordinate system from the CF-netCDF - grid mapping variable. 
- - """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - longitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None) - # Iris currently only supports Mercator projections with specific - # values for false_easting, false_northing, - # scale_factor_at_projection_origin and standard_parallel. These are - # checked elsewhere. - - ellipsoid = None - if major is not None or minor is not None or \ - inverse_flattening is not None: - ellipsoid = iris.coord_systems.GeogCS(major, minor, - inverse_flattening) - - cs = iris.coord_systems.Mercator( - longitude_of_projection_origin, - ellipsoid=ellipsoid) - - return cs - - - ################################################################################ - def build_lambert_azimuthal_equal_area_coordinate_system(engine, cf_grid_var): - """ - Create a lambert azimuthal equal area coordinate system from the CF-netCDF - grid mapping variable. - - """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - latitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None) - longitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None) - false_easting = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - - ellipsoid = None - if major is not None or minor is not None or \ - inverse_flattening is not None: - ellipsoid = iris.coord_systems.GeogCS(major, minor, - inverse_flattening) - - cs = iris.coord_systems.LambertAzimuthalEqualArea( - latitude_of_projection_origin, longitude_of_projection_origin, - false_easting, false_northing, ellipsoid) - - return cs - - ################################################################################ - def build_albers_equal_area_coordinate_system(engine, cf_grid_var): - """ - Create a albers conical equal area coordinate system from the CF-netCDF - grid mapping variable. - - """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - latitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None) - longitude_of_central_meridian = getattr( - cf_grid_var, CF_ATTR_GRID_LON_OF_CENT_MERIDIAN, None) - false_easting = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - standard_parallels = getattr( - cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None) - - ellipsoid = None - if major is not None or minor is not None or \ - inverse_flattening is not None: - ellipsoid = iris.coord_systems.GeogCS(major, minor, - inverse_flattening) - - cs = iris.coord_systems.AlbersEqualArea( - latitude_of_projection_origin, longitude_of_central_meridian, - false_easting, false_northing, standard_parallels, ellipsoid) - - return cs - - ################################################################################ - def build_vertical_perspective_coordinate_system(engine, cf_grid_var): - """ - Create a vertical perspective coordinate system from the CF-netCDF - grid mapping variable. 
- - """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - latitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None) - longitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None) - perspective_point_height = getattr( - cf_grid_var, CF_ATTR_GRID_PERSPECTIVE_HEIGHT, None) - false_easting = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - - ellipsoid = None - if major is not None or minor is not None or \ - inverse_flattening is not None: - ellipsoid = iris.coord_systems.GeogCS(major, minor, - inverse_flattening) - - cs = iris.coord_systems.VerticalPerspective( - latitude_of_projection_origin, longitude_of_projection_origin, - perspective_point_height, false_easting, false_northing, ellipsoid) - - return cs - - ################################################################################ - def build_geostationary_coordinate_system(engine, cf_grid_var): - """ - Create a geostationary coordinate system from the CF-netCDF - grid mapping variable. - - """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - latitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None) - longitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None) - perspective_point_height = getattr( - cf_grid_var, CF_ATTR_GRID_PERSPECTIVE_HEIGHT, None) - false_easting = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - sweep_angle_axis = getattr( - cf_grid_var, CF_ATTR_GRID_SWEEP_ANGLE_AXIS, None) - - ellipsoid = None - if major is not None or minor is not None or \ - inverse_flattening is not None: - ellipsoid = iris.coord_systems.GeogCS(major, minor, - inverse_flattening) - - cs = iris.coord_systems.Geostationary( - latitude_of_projection_origin, longitude_of_projection_origin, - perspective_point_height, sweep_angle_axis, false_easting, - false_northing, ellipsoid) - - return cs - - ################################################################################ - def get_attr_units(cf_var, attributes): - attr_units = getattr(cf_var, CF_ATTR_UNITS, UNKNOWN_UNIT_STRING) - if not attr_units: - attr_units = UNKNOWN_UNIT_STRING - - # Sanitise lat/lon units. - if attr_units in UD_UNITS_LAT or attr_units in UD_UNITS_LON: - attr_units = 'degrees' - - # Graceful loading of invalid units. - try: - cf_units.as_unit(attr_units) - except ValueError: - # Using converted unicode message. Can be reverted with Python 3. - msg = u'Ignoring netCDF variable {!r} invalid units {!r}'.format( - cf_var.cf_name, attr_units) - warnings.warn(msg) - attributes['invalid_units'] = attr_units - attr_units = UNKNOWN_UNIT_STRING - - if np.issubdtype(cf_var.dtype, np.str_): - attr_units = NO_UNIT_STRING - - if any(hasattr(cf_var.cf_data, name) for name in ("flag_values", "flag_masks", "flag_meanings")): - attr_units = cf_units._NO_UNIT_STRING - - # Get any assoicated calendar for a time reference coordinate. 
- if cf_units.as_unit(attr_units).is_time_reference(): - attr_calendar = getattr(cf_var, CF_ATTR_CALENDAR, None) - - if attr_calendar: - attr_units = cf_units.Unit(attr_units, calendar=attr_calendar) - - return attr_units - - - ################################################################################ - def get_names(cf_coord_var, coord_name, attributes): - """Determine the standard_name, long_name and var_name attributes.""" - - standard_name = getattr(cf_coord_var, CF_ATTR_STD_NAME, None) - long_name = getattr(cf_coord_var, CF_ATTR_LONG_NAME, None) - cf_name = str(cf_coord_var.cf_name) - - if standard_name is not None: - try: - standard_name = _get_valid_standard_name(standard_name) - except ValueError: - if long_name is not None: - attributes['invalid_standard_name'] = standard_name - if coord_name is not None: - standard_name = coord_name - else: - standard_name = None - else: - if coord_name is not None: - attributes['invalid_standard_name'] = standard_name - standard_name = coord_name - else: - standard_name = None - - else: - if coord_name is not None: - standard_name = coord_name - - # Last attempt to set the standard name to something meaningful. - if standard_name is None: - if cf_name in iris.std_names.STD_NAMES: - standard_name = cf_name - - return (standard_name, long_name, cf_name) - - - ################################################################################ - def get_cf_bounds_var(cf_coord_var): - """ - Return the CF variable representing the bounds of a coordinate - variable. - - """ - attr_bounds = getattr(cf_coord_var, CF_ATTR_BOUNDS, None) - attr_climatology = getattr(cf_coord_var, CF_ATTR_CLIMATOLOGY, None) - - # Determine bounds, prefering standard bounds over climatology. - # NB. No need to raise a warning if the bounds/climatology - # variable is missing, as that will already have been done by - # iris.fileformats.cf. - cf_bounds_var = None - climatological = False - if attr_bounds is not None: - bounds_vars = cf_coord_var.cf_group.bounds - if attr_bounds in bounds_vars: - cf_bounds_var = bounds_vars[attr_bounds] - elif attr_climatology is not None: - climatology_vars = cf_coord_var.cf_group.climatology - if attr_climatology in climatology_vars: - cf_bounds_var = climatology_vars[attr_climatology] - climatological = True - - if attr_bounds is not None and attr_climatology is not None: - warnings.warn('Ignoring climatology in favour of bounds attribute ' - 'on NetCDF variable {!r}.'.format( - cf_coord_var.cf_name)) - - return cf_bounds_var, climatological - - - ################################################################################ - def reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var): - """ - Return a bounds_data array with the vertex dimension as the most - rapidly varying. - - .. note:: - - This function assumes the dimension names of the coordinate - variable match those of the bounds variable in order to determine - which is the vertex dimension. - - - """ - vertex_dim_names = set(cf_bounds_var.dimensions).difference( - cf_coord_var.dimensions) - if len(vertex_dim_names) != 1: - msg = 'Too many dimension names differ between coordinate ' \ - 'variable {!r} and the bounds variable {!r}. ' \ - 'Expected 1, got {}.' 
- raise ValueError(msg.format(str(cf_coord_var.cf_name), - str(cf_bounds_var.cf_name), - len(vertex_dim_names))) - vertex_dim = cf_bounds_var.dimensions.index(*vertex_dim_names) - bounds_data = np.rollaxis(bounds_data.view(), vertex_dim, - len(bounds_data.shape)) - return bounds_data - - - ################################################################################ - def build_dimension_coordinate(engine, cf_coord_var, coord_name=None, coord_system=None): - """Create a dimension coordinate (DimCoord) and add it to the cube.""" - - cf_var = engine.cf_var - cube = engine.cube - attributes = {} - - attr_units = get_attr_units(cf_coord_var, attributes) - points_data = cf_coord_var[:] - # Gracefully fill points masked array. - if ma.is_masked(points_data): - points_data = ma.filled(points_data) - msg = 'Gracefully filling {!r} dimension coordinate masked points' - warnings.warn(msg.format(str(cf_coord_var.cf_name))) - - # Get any coordinate bounds. - cf_bounds_var, climatological = get_cf_bounds_var( - cf_coord_var) - if cf_bounds_var is not None: - bounds_data = cf_bounds_var[:] - # Gracefully fill bounds masked array. - if ma.is_masked(bounds_data): - bounds_data = ma.filled(bounds_data) - msg = 'Gracefully filling {!r} dimension coordinate masked bounds' - warnings.warn(msg.format(str(cf_coord_var.cf_name))) - # Handle transposed bounds where the vertex dimension is not - # the last one. Test based on shape to support different - # dimension names. - if cf_bounds_var.shape[:-1] != cf_coord_var.shape: - bounds_data = reorder_bounds_data(bounds_data, cf_bounds_var, - cf_coord_var) - else: - bounds_data = None - - # Determine whether the coordinate is circular. - circular = False - if points_data.ndim == 1 and coord_name in [CF_VALUE_STD_NAME_LON, CF_VALUE_STD_NAME_GRID_LON] \ - and cf_units.Unit(attr_units) in [cf_units.Unit('radians'), cf_units.Unit('degrees')]: - modulus_value = cf_units.Unit(attr_units).modulus - circular = iris.util._is_circular(points_data, modulus_value, bounds=bounds_data) - - # Determine the name of the dimension/s shared between the CF-netCDF data variable - # and the coordinate being built. - common_dims = [dim for dim in cf_coord_var.dimensions - if dim in cf_var.dimensions] - data_dims = None - if common_dims: - # Calculate the offset of each common dimension. - data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] - - # Determine the standard_name, long_name and var_name - standard_name, long_name, var_name = get_names(cf_coord_var, coord_name, attributes) - - # Create the coordinate. - try: - coord = iris.coords.DimCoord(points_data, - standard_name=standard_name, - long_name=long_name, - var_name=var_name, - units=attr_units, - bounds=bounds_data, - attributes=attributes, - coord_system=coord_system, - circular=circular, - climatological= - climatological) - except ValueError as e_msg: - # Attempt graceful loading. - coord = iris.coords.AuxCoord(points_data, - standard_name=standard_name, - long_name=long_name, - var_name=var_name, - units=attr_units, - bounds=bounds_data, - attributes=attributes, - coord_system=coord_system, - climatological= - climatological) - cube.add_aux_coord(coord, data_dims) - msg = 'Failed to create {name!r} dimension coordinate: {error}\n' \ - 'Gracefully creating {name!r} auxiliary coordinate instead.' - warnings.warn(msg.format(name=str(cf_coord_var.cf_name), - error=e_msg)) - else: - # Add the dimension coordinate to the cube. 
- if data_dims: - cube.add_dim_coord(coord, data_dims) - else: - # Scalar coords are placed in the aux_coords container. - cube.add_aux_coord(coord, data_dims) - - # Update the coordinate to CF-netCDF variable mapping. - engine.cube_parts['coordinates'].append((coord, cf_coord_var.cf_name)) - - - ################################################################################ - def build_auxiliary_coordinate(engine, cf_coord_var, coord_name=None, coord_system=None): - """Create an auxiliary coordinate (AuxCoord) and add it to the cube.""" - - cf_var = engine.cf_var - cube = engine.cube - attributes = {} - - # Get units - attr_units = get_attr_units(cf_coord_var, attributes) - - # Get any coordinate point data. - if isinstance(cf_coord_var, cf.CFLabelVariable): - points_data = cf_coord_var.cf_label_data(cf_var) - else: - points_data = _get_cf_var_data(cf_coord_var, engine.filename) - - # Get any coordinate bounds. - cf_bounds_var, climatological = get_cf_bounds_var( - cf_coord_var) - if cf_bounds_var is not None: - bounds_data = _get_cf_var_data(cf_bounds_var, engine.filename) - - # Handle transposed bounds where the vertex dimension is not - # the last one. Test based on shape to support different - # dimension names. - if cf_bounds_var.shape[:-1] != cf_coord_var.shape: - # Resolving the data to a numpy array (i.e. *not* masked) for - # compatibility with array creators (i.e. dask) - bounds_data = np.asarray(bounds_data) - bounds_data = reorder_bounds_data(bounds_data, cf_bounds_var, - cf_coord_var) - else: - bounds_data = None - - # Determine the name of the dimension/s shared between the CF-netCDF data variable - # and the coordinate being built. - common_dims = [dim for dim in cf_coord_var.dimensions - if dim in cf_var.dimensions] - data_dims = None - if common_dims: - # Calculate the offset of each common dimension. - data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] - - # Determine the standard_name, long_name and var_name - standard_name, long_name, var_name = get_names(cf_coord_var, coord_name, attributes) - - # Create the coordinate - coord = iris.coords.AuxCoord(points_data, - standard_name=standard_name, - long_name=long_name, - var_name=var_name, - units=attr_units, - bounds=bounds_data, - attributes=attributes, - coord_system=coord_system, - climatological= - climatological) - - # Add it to the cube - cube.add_aux_coord(coord, data_dims) - - # Make a list with names, stored on the engine, so we can find them all later. - engine.cube_parts['coordinates'].append((coord, cf_coord_var.cf_name)) - - - ################################################################################ - def build_cell_measures(engine, cf_cm_var): - """Create a CellMeasure instance and add it to the cube.""" - cf_var = engine.cf_var - cube = engine.cube - attributes = {} - - # Get units - attr_units = get_attr_units(cf_cm_var, attributes) - - # Get (lazy) content array - data = _get_cf_var_data(cf_cm_var, engine.filename) - - # Determine the name of the dimension/s shared between the CF-netCDF data variable - # and the coordinate being built. - common_dims = [dim for dim in cf_cm_var.dimensions - if dim in cf_var.dimensions] - data_dims = None - if common_dims: - # Calculate the offset of each common dimension. - data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] - - # Determine the standard_name, long_name and var_name - standard_name, long_name, var_name = get_names(cf_cm_var, None, attributes) - - # Obtain the cf_measure. 
- measure = cf_cm_var.cf_measure - - # Create the CellMeasure - cell_measure = iris.coords.CellMeasure(data, - standard_name=standard_name, - long_name=long_name, - var_name=var_name, - units=attr_units, - attributes=attributes, - measure=measure) - - # Add it to the cube - cube.add_cell_measure(cell_measure, data_dims) - - # Make a list with names, stored on the engine, so we can find them all later. - engine.cube_parts['cell_measures'].append((cell_measure, cf_cm_var.cf_name)) - - - - ################################################################################ - def build_ancil_var(engine, cf_av_var): - """Create an AncillaryVariable instance and add it to the cube.""" - cf_var = engine.cf_var - cube = engine.cube - attributes = {} - - # Get units - attr_units = get_attr_units(cf_av_var, attributes) - - # Get (lazy) content array - data = _get_cf_var_data(cf_av_var, engine.filename) - - # Determine the name of the dimension/s shared between the CF-netCDF data variable - # and the AV being built. - common_dims = [dim for dim in cf_av_var.dimensions - if dim in cf_var.dimensions] - data_dims = None - if common_dims: - # Calculate the offset of each common dimension. - data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] - - # Determine the standard_name, long_name and var_name - standard_name, long_name, var_name = get_names(cf_av_var, None, attributes) - - # Create the AncillaryVariable - av = iris.coords.AncillaryVariable( - data, - standard_name=standard_name, - long_name=long_name, - var_name=var_name, - units=attr_units, - attributes=attributes) - - # Add it to the cube - cube.add_ancillary_variable(av, data_dims) - - # Make a list with names, stored on the engine, so we can find them all later. - engine.cube_parts['ancillary_variables'].append((av, cf_av_var.cf_name)) - - - - ################################################################################ - def _is_lat_lon(cf_var, ud_units, std_name, std_name_grid, axis_name, prefixes): - """ - Determine whether the CF coordinate variable is a latitude/longitude variable. - - Ref: [CF] Section 4.1 Latitude Coordinate. - [CF] Section 4.2 Longitude Coordinate. - - """ - is_valid = False - attr_units = getattr(cf_var, CF_ATTR_UNITS, None) - - if attr_units is not None: - attr_units = attr_units.lower() - is_valid = attr_units in ud_units - - # Special case - Check for rotated pole. - if attr_units == 'degrees': - attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None) - if attr_std_name is not None: - is_valid = attr_std_name.lower() == std_name_grid - else: - is_valid = False - # TODO: check that this interpretation of axis is correct. - attr_axis = getattr(cf_var, CF_ATTR_AXIS, None) - if attr_axis is not None: - is_valid = attr_axis.lower() == axis_name - else: - # Alternative is to check standard_name or axis. 
- attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None) - - if attr_std_name is not None: - attr_std_name = attr_std_name.lower() - is_valid = attr_std_name in [std_name, std_name_grid] - if not is_valid: - is_valid = any([attr_std_name.startswith(prefix) for prefix in prefixes]) - else: - attr_axis = getattr(cf_var, CF_ATTR_AXIS, None) - - if attr_axis is not None: - is_valid = attr_axis.lower() == axis_name - - return is_valid - - - ################################################################################ - def is_latitude(engine, cf_name): - """Determine whether the CF coordinate variable is a latitude variable.""" - cf_var = engine.cf_var.cf_group[cf_name] - return _is_lat_lon(cf_var, UD_UNITS_LAT, CF_VALUE_STD_NAME_LAT, - CF_VALUE_STD_NAME_GRID_LAT, CF_VALUE_AXIS_Y, ['lat', 'rlat']) - - - ################################################################################ - def is_longitude(engine, cf_name): - """Determine whether the CF coordinate variable is a longitude variable.""" - cf_var = engine.cf_var.cf_group[cf_name] - return _is_lat_lon(cf_var, UD_UNITS_LON, CF_VALUE_STD_NAME_LON, - CF_VALUE_STD_NAME_GRID_LON, CF_VALUE_AXIS_X, ['lon', 'rlon']) - - - ################################################################################ - def is_projection_x_coordinate(engine, cf_name): - """ - Determine whether the CF coordinate variable is a - projection_x_coordinate variable. - - """ - cf_var = engine.cf_var.cf_group[cf_name] - attr_name = getattr(cf_var, CF_ATTR_STD_NAME, None) or \ - getattr(cf_var, CF_ATTR_LONG_NAME, None) - return attr_name == CF_VALUE_STD_NAME_PROJ_X - - - ################################################################################ - def is_projection_y_coordinate(engine, cf_name): - """ - Determine whether the CF coordinate variable is a - projection_y_coordinate variable. - - """ - cf_var = engine.cf_var.cf_group[cf_name] - attr_name = getattr(cf_var, CF_ATTR_STD_NAME, None) or \ - getattr(cf_var, CF_ATTR_LONG_NAME, None) - return attr_name == CF_VALUE_STD_NAME_PROJ_Y - - - ################################################################################ - def is_time(engine, cf_name): - """ - Determine whether the CF coordinate variable is a time variable. - - Ref: [CF] Section 4.4 Time Coordinate. 
- - """ - is_valid = False - cf_var = engine.cf_var.cf_group[cf_name] - attr_units = getattr(cf_var, CF_ATTR_UNITS, None) - - attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None) - attr_axis = getattr(cf_var, CF_ATTR_AXIS, '') - try: - is_time_reference = cf_units.Unit(attr_units or 1).is_time_reference() - except ValueError: - is_time_reference = False - - return is_time_reference and (attr_std_name=='time' or attr_axis.lower()==CF_VALUE_AXIS_T) - - - ################################################################################ - def is_time_period(engine, cf_name): - """Determine whether the CF coordinate variable represents a time period.""" - is_valid = False - cf_var = engine.cf_var.cf_group[cf_name] - attr_units = getattr(cf_var, CF_ATTR_UNITS, None) - - if attr_units is not None: - try: - is_valid = cf_units.is_time(attr_units) - except ValueError: - is_valid = False - - return is_valid - - - ################################################################################ - def is_grid_mapping(engine, cf_name, grid_mapping): - """Determine whether the CF grid mapping variable is of the appropriate type.""" - - is_valid = False - cf_var = engine.cf_var.cf_group[cf_name] - attr_mapping_name = getattr(cf_var, CF_ATTR_GRID_MAPPING_NAME, None) - - if attr_mapping_name is not None: - is_valid = attr_mapping_name.lower() == grid_mapping - - return is_valid - - - ################################################################################ - def _is_rotated(engine, cf_name, cf_attr_value): - """Determine whether the CF coordinate variable is rotated.""" - - is_valid = False - cf_var = engine.cf_var.cf_group[cf_name] - attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None) - - if attr_std_name is not None: - is_valid = attr_std_name.lower() == cf_attr_value - else: - attr_units = getattr(cf_var, CF_ATTR_UNITS, None) - if attr_units is not None: - is_valid = attr_units.lower() == 'degrees' - - return is_valid - - - ################################################################################ - def is_rotated_latitude(engine, cf_name): - """Determine whether the CF coodinate variable is rotated latitude.""" - return _is_rotated(engine, cf_name, CF_VALUE_STD_NAME_GRID_LAT) - - - ############################################################################### - def is_rotated_longitude(engine, cf_name): - """Determine whether the CF coordinate variable is rotated longitude.""" - return _is_rotated(engine, cf_name, CF_VALUE_STD_NAME_GRID_LON) - - - ################################################################################ - def has_supported_mercator_parameters(engine, cf_name): - """Determine whether the CF grid mapping variable has the supported - values for the parameters of the Mercator projection.""" - - is_valid = True - cf_grid_var = engine.cf_var.cf_group[cf_name] - - false_easting = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - scale_factor_at_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None) - standard_parallel = getattr( - cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None) - - if false_easting is not None and \ - false_easting != 0: - warnings.warn('False eastings other than 0.0 not yet supported ' - 'for Mercator projections') - is_valid = False - if false_northing is not None and \ - false_northing != 0: - warnings.warn('False northings other than 0.0 not yet supported ' - 'for Mercator projections') - is_valid = False - if 
scale_factor_at_projection_origin is not None and \ - scale_factor_at_projection_origin != 1: - warnings.warn('Scale factors other than 1.0 not yet supported for ' - 'Mercator projections') - is_valid = False - if standard_parallel is not None and \ - standard_parallel != 0: - warnings.warn('Standard parallels other than 0.0 not yet ' - 'supported for Mercator projections') - is_valid = False - - return is_valid - - - ################################################################################ - def has_supported_stereographic_parameters(engine, cf_name): - """Determine whether the CF grid mapping variable has a value of 1.0 - for the scale_factor_at_projection_origin attribute.""" - - is_valid = True - cf_grid_var = engine.cf_var.cf_group[cf_name] - - scale_factor_at_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None) - - if scale_factor_at_projection_origin is not None and \ - scale_factor_at_projection_origin != 1: - warnings.warn('Scale factors other than 1.0 not yet supported for ' - 'stereographic projections') - is_valid = False - - return is_valid - - - ################################################################################ - def _parse_cell_methods(cf_var_name, nc_cell_methods): - """Parse the CF cell_methods attribute string.""" - - cell_methods = [] - if nc_cell_methods is not None: - for m in CM_PARSE.finditer(nc_cell_methods): - d = m.groupdict() - method = d[CM_METHOD] - method = method.strip() - # Check validity of method, allowing for multi-part methods - # e.g. mean over years. - method_words = method.split() - if method_words[0].lower() not in CM_KNOWN_METHODS: - msg = 'NetCDF variable {!r} contains unknown cell ' \ - 'method {!r}' - warnings.warn(msg.format('{}'.format(cf_var_name), - '{}'.format(method_words[0]))) - d[CM_METHOD] = method - name = d[CM_NAME] - name = name.replace(' ', '') - name = name.rstrip(':') - d[CM_NAME] = tuple([n for n in name.split(':')]) - interval = [] - comment = [] - if d[CM_EXTRA] is not None: - # - # tokenise the key words and field colon marker - # - d[CM_EXTRA] = d[CM_EXTRA].replace('comment:', '<><<:>>') - d[CM_EXTRA] = d[CM_EXTRA].replace('interval:', '<><<:>>') - d[CM_EXTRA] = d[CM_EXTRA].split('<<:>>') - if len(d[CM_EXTRA]) == 1: - comment.extend(d[CM_EXTRA]) - else: - next_field_type = comment - for field in d[CM_EXTRA]: - field_type = next_field_type - index = field.rfind('<>') - if index == 0: - next_field_type = interval - continue - elif index > 0: - next_field_type = interval - else: - index = field.rfind('<>') - if index == 0: - next_field_type = comment - continue - elif index > 0: - next_field_type = comment - if index != -1: - field = field[:index] - field_type.append(field.strip()) - # - # cater for a shared interval over multiple axes - # - if len(interval): - if len(d[CM_NAME]) != len(interval) and len(interval) == 1: - interval = interval*len(d[CM_NAME]) - # - # cater for a shared comment over multiple axes - # - if len(comment): - if len(d[CM_NAME]) != len(comment) and len(comment) == 1: - comment = comment*len(d[CM_NAME]) - d[CM_INTERVAL] = tuple(interval) - d[CM_COMMENT] = tuple(comment) - cell_methods.append(iris.coords.CellMethod(d[CM_METHOD], coords=d[CM_NAME], intervals=d[CM_INTERVAL], comments=d[CM_COMMENT])) - return tuple(cell_methods) diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index 8f40131e54..14dbab8054 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -25,7 +25,6 @@ import netCDF4 import 
numpy as np import numpy.ma as ma -from pyke import knowledge_engine from iris._lazy_data import as_lazy_data from iris.aux_factory import ( @@ -41,17 +40,13 @@ import iris.coord_systems import iris.coords import iris.exceptions -import iris.fileformats._pyke_rules import iris.fileformats.cf +import iris.io import iris.util -# Show Pyke inference engine statistics. +# Show actions activation statistics. DEBUG = False -# Pyke CF related file names. -_PYKE_RULE_BASE = "fc_rules_cf" -_PYKE_FACT_BASE = "facts_cf" - # Standard CML spatio-temporal axis names. SPATIO_TEMPORAL_AXES = ["t", "z", "y", "x"] @@ -381,34 +376,13 @@ def coord(self, name): return result -def _pyke_kb_engine(): - """Return the PyKE knowledge engine for CF->cube conversion.""" - - pyke_dir = os.path.join(os.path.dirname(__file__), "_pyke_rules") - compile_dir = os.path.join(pyke_dir, "compiled_krb") - engine = None - - if os.path.exists(compile_dir): - tmpvar = [ - os.path.getmtime(os.path.join(compile_dir, fname)) - for fname in os.listdir(compile_dir) - if not fname.startswith("_") - ] - if tmpvar: - oldest_pyke_compile_file = min(tmpvar) - rule_age = os.path.getmtime( - os.path.join(pyke_dir, _PYKE_RULE_BASE + ".krb") - ) - - if oldest_pyke_compile_file >= rule_age: - # Initialise the pyke inference engine. - engine = knowledge_engine.engine( - (None, "iris.fileformats._pyke_rules.compiled_krb") - ) - - if engine is None: - engine = knowledge_engine.engine(iris.fileformats._pyke_rules) +def _actions_engine(): + # Return an 'actions engine', which provides a pyke-rules-like interface to + # the core cf translation code. + # Deferred import to avoid circularity. + import iris.fileformats._nc_load_rules.engine as nc_actions_engine + engine = nc_actions_engine.Engine() return engine @@ -455,85 +429,78 @@ def __setstate__(self, state): def _assert_case_specific_facts(engine, cf, cf_group): - # Initialise pyke engine "provides" hooks. - # These are used to patch non-processed element attributes after rules activation. + # Initialise a data store for built cube elements. + # This is used to patch element attributes *not* setup by the actions + # process, after the actions code has run. engine.cube_parts["coordinates"] = [] engine.cube_parts["cell_measures"] = [] engine.cube_parts["ancillary_variables"] = [] # Assert facts for CF coordinates. for cf_name in cf_group.coordinates.keys(): - engine.add_case_specific_fact( - _PYKE_FACT_BASE, "coordinate", (cf_name,) - ) + engine.add_case_specific_fact("coordinate", (cf_name,)) # Assert facts for CF auxiliary coordinates. for cf_name in cf_group.auxiliary_coordinates.keys(): - engine.add_case_specific_fact( - _PYKE_FACT_BASE, "auxiliary_coordinate", (cf_name,) - ) + engine.add_case_specific_fact("auxiliary_coordinate", (cf_name,)) # Assert facts for CF cell measures. for cf_name in cf_group.cell_measures.keys(): - engine.add_case_specific_fact( - _PYKE_FACT_BASE, "cell_measure", (cf_name,) - ) + engine.add_case_specific_fact("cell_measure", (cf_name,)) # Assert facts for CF ancillary variables. for cf_name in cf_group.ancillary_variables.keys(): - engine.add_case_specific_fact( - _PYKE_FACT_BASE, "ancillary_variable", (cf_name,) - ) + engine.add_case_specific_fact("ancillary_variable", (cf_name,)) # Assert facts for CF grid_mappings. for cf_name in cf_group.grid_mappings.keys(): - engine.add_case_specific_fact( - _PYKE_FACT_BASE, "grid_mapping", (cf_name,) - ) + engine.add_case_specific_fact("grid_mapping", (cf_name,)) # Assert facts for CF labels. 
for cf_name in cf_group.labels.keys(): - engine.add_case_specific_fact(_PYKE_FACT_BASE, "label", (cf_name,)) + engine.add_case_specific_fact("label", (cf_name,)) # Assert facts for CF formula terms associated with the cf_group # of the CF data variable. - formula_root = set() + + # Collect varnames of formula-root variables as we go. + # NOTE: use dictionary keys as an 'OrderedSet' + # - see: https://stackoverflow.com/a/53657523/2615050 + # This is to ensure that we can handle the resulting facts in a definite + # order, as using a 'set' led to indeterminate results. + formula_root = {} for cf_var in cf.cf_group.formula_terms.values(): for cf_root, cf_term in cf_var.cf_terms_by_root.items(): # Only assert this fact if the formula root variable is # defined in the CF group of the CF data variable. if cf_root in cf_group: - formula_root.add(cf_root) + formula_root[cf_root] = True engine.add_case_specific_fact( - _PYKE_FACT_BASE, "formula_term", (cf_var.cf_name, cf_root, cf_term), ) - for cf_root in formula_root: - engine.add_case_specific_fact( - _PYKE_FACT_BASE, "formula_root", (cf_root,) - ) + for cf_root in formula_root.keys(): + engine.add_case_specific_fact("formula_root", (cf_root,)) -def _pyke_stats(engine, cf_name): - if DEBUG: - print("-" * 80) - print("CF Data Variable: %r" % cf_name) +def _actions_activation_stats(engine, cf_name): + print("-" * 80) + print("CF Data Variable: %r" % cf_name) - engine.print_stats() + engine.print_stats() - print("Rules Triggered:") + print("Rules Triggered:") - for rule in sorted(list(engine.rule_triggered)): - print("\t%s" % rule) + for rule in sorted(list(engine.rule_triggered)): + print("\t%s" % rule) - print("Case Specific Facts:") - kb_facts = engine.get_kb(_PYKE_FACT_BASE) + print("Case Specific Facts:") + kb_facts = engine.get_kb() - for key in kb_facts.entity_lists.keys(): - for arg in kb_facts.entity_lists[key].case_specific_facts: - print("\t%s%s" % (key, arg)) + for key in kb_facts.entity_lists.keys(): + for arg in kb_facts.entity_lists[key].case_specific_facts: + print("\t%s%s" % (key, arg)) def _set_attributes(attributes, key, value): @@ -581,6 +548,23 @@ def _get_cf_var_data(cf_var, filename): return as_lazy_data(proxy, chunks=chunks) +class OrderedAddableList(list): + # Used purely in actions debugging, to accumulate a record of which actions + # were activated. + # It replaces a set, so as to record the ordering of operations, with + # possible repeats, and it also numbers the entries. + # Actions routines invoke the 'add' method, which thus effectively converts + # a set.add into a list.append. + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._n_add = 0 + + def add(self, msg): + self._n_add += 1 + n_add = self._n_add + self.append(f"#{n_add:03d} : {msg}") + + def _load_cube(engine, cf, cf_var, filename): from iris.cube import Cube @@ -588,22 +572,26 @@ def _load_cube(engine, cf, cf_var, filename): data = _get_cf_var_data(cf_var, filename) cube = Cube(data) - # Reset the pyke inference engine. + # Reset the actions engine. engine.reset() - # Initialise pyke engine rule processing hooks. + # Initialise engine rule processing hooks. engine.cf_var = cf_var engine.cube = cube engine.cube_parts = {} engine.requires = {} - engine.rule_triggered = set() + engine.rule_triggered = OrderedAddableList() engine.filename = filename - # Assert any case-specific facts. + # Assert all the case-specific facts. 
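An illustrative aside (not from the patch): the "dict keys as an OrderedSet" idiom noted in `_assert_case_specific_facts` above, in a minimal runnable form. The variable names mirror the patch; the formula-term strings are arbitrary examples.

```python
# Dict keys give set-like de-duplication, but (from Python 3.7) also preserve
# first-insertion order, so the facts are asserted in a repeatable order.
formula_root = {}
for cf_root in ["sigma", "orography", "sigma", "surface_pressure"]:
    formula_root[cf_root] = True  # repeated adds are harmless

print(list(formula_root.keys()))
# ['sigma', 'orography', 'surface_pressure']  (deterministic, unlike a set)
```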
+ # This extracts 'facts' specific to this data-variable (aka cube), from + # the info supplied in the CFGroup object. _assert_case_specific_facts(engine, cf, cf_var.cf_group) - # Run pyke inference engine with forward chaining rules. - engine.activate(_PYKE_RULE_BASE) + # Run the actions engine. + # This creates various cube elements and attaches them to the cube. + # It also records various other info on the engine, to be processed later. + engine.activate() # Having run the rules, now populate the attributes of all the cf elements with the # "unused" attributes from the associated CF-netCDF variable. @@ -650,8 +638,9 @@ def fix_attributes_all_elements(role_name): for method in cube.cell_methods ] - # Show pyke session statistics. - _pyke_stats(engine, cf_var.cf_name) + if DEBUG: + # Show activation statistics for this data-var (i.e. cube). + _actions_activation_stats(engine, cf_var.cf_name) return cube @@ -790,8 +779,8 @@ def load_cubes(filenames, callback=None): """ from iris.io import run_callback - # Initialise the pyke inference engine. - engine = _pyke_kb_engine() + # Create an actions engine. + engine = _actions_engine() if isinstance(filenames, str): filenames = [filenames] diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py index 5a89bf5e23..01f6f777fa 100644 --- a/lib/iris/tests/test_coding_standards.py +++ b/lib/iris/tests/test_coding_standards.py @@ -110,7 +110,6 @@ def test_license_headers(self): "docs/src/userguide/regridding_plots/*.py", "docs/src/_build/*", "lib/iris/analysis/_scipy_interpolate.py", - "lib/iris/fileformats/_pyke_rules/*", ) try: diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index 4d92274fcf..36e06202d1 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -30,7 +30,7 @@ import iris.analysis.trajectory import iris.coord_systems as icoord_systems from iris.coords import AncillaryVariable, CellMeasure -import iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc as pyke_rules +from iris.fileformats._nc_load_rules import helpers as ncload_helpers import iris.fileformats.netcdf from iris.fileformats.netcdf import load_cubes as nc_load_cubes import iris.std_names @@ -522,13 +522,19 @@ def test_lat_lon_major_minor(self): minor = 63567523 self.grid.semi_major_axis = major self.grid.semi_minor_axis = minor - crs = pyke_rules.build_coordinate_system(self.grid) + # NB 'build_coordinate_system' has an extra (unused) 'engine' arg, just + # so that it has the same signature as other coord builder routines. + engine = None + crs = ncload_helpers.build_coordinate_system(engine, self.grid) self.assertEqual(crs, icoord_systems.GeogCS(major, minor)) def test_lat_lon_earth_radius(self): earth_radius = 63700000 self.grid.earth_radius = earth_radius - crs = pyke_rules.build_coordinate_system(self.grid) + # NB 'build_coordinate_system' has an extra (unused) 'engine' arg, just + # so that it has the same signature as other coord builder routines. 
+ engine = None + crs = ncload_helpers.build_coordinate_system(engine, self.grid) self.assertEqual(crs, icoord_systems.GeogCS(earth_radius)) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py similarity index 72% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/__init__.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py index a8093f5c8c..2ea22c420b 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py @@ -4,6 +4,7 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Unit tests for the :mod:`iris.fileformats._pyke_rules.compiled_krb` module. +Unit tests for the module +:mod:`iris.fileformats.netcdf._nc_load_rules` . """ diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py new file mode 100644 index 0000000000..717e5b5c41 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py @@ -0,0 +1,157 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the module :mod:`iris.fileformats._nc_load_rules.actions`. + +This module provides the engine.activate() call used in the function +`iris.fileformats.netcdf._load_cube`. + +""" +from pathlib import Path +import shutil +import subprocess +import tempfile + +import iris.fileformats._nc_load_rules.engine +from iris.fileformats.cf import CFReader +import iris.fileformats.netcdf +from iris.fileformats.netcdf import _load_cube + +""" +Notes on testing method. + +IN cf : "def _load_cube(engine, cf, cf_var, filename)" +WHERE: + - engine is a :class:`iris.fileformats._nc_load_rules.engine.Engine` + - cf is a :class:`iris.fileformats.cf.CFReader` + - cf_var is a :class:`iris.fileformats.cf.CFDataVariable` + +As it's hard to construct a suitable CFReader from scratch, it would seem +simpler (for now) to use an ACTUAL FILE. +Likewise, the easiest approach to that is with CDL and "ncgen". +To do this, we need a test "fixture" that can create suitable test files in a +temporary directory. + +""" + + +class Mixin__nc_load_actions: + """ + Class to make testcases for rules or actions code, and check results. + + Defines standard setUpClass/tearDownClass methods, to create a temporary + directory for intermediate files. + NOTE: owing to peculiarities of unittest, these must be explicitly called + from a setUpClass/tearDownClass within the 'final' inheritor, i.e. the + actual Test_XXX class which also inherits unittest.TestCase. + + Testcases are manufactured by the '_make_testcase_cdl' method. + The 'run_testcase' method takes the '_make_testcase_cdl' kwargs and makes + a result cube (by: producing cdl, converting to netcdf, and loading the + 'phenom' variable only). + Likewise, a generalised 'check_result' method will be used to perform result + checking. + Both '_make_testcase_cdl' and 'check_result' are not defined here : They + are to be variously implemented by the inheritors. + + """ + + # "global" test setting : whether to output various debug info + debug = False + + @classmethod + def setUpClass(cls): + # Create a temp directory for temp files. 
+ cls.temp_dirpath = Path(tempfile.mkdtemp()) + + @classmethod + def tearDownClass(cls): + # Destroy a temp directory for temp files. + shutil.rmtree(cls.temp_dirpath) + + def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): + """ + Load the 'phenom' data variable in a CDL testcase, as a cube. + + Using ncgen, CFReader and the _load_cube call. + + """ + # Write the CDL to a file. + with open(cdl_path, "w") as f_out: + f_out.write(cdl_string) + + # Create a netCDF file from the CDL file. + command = "ncgen -o {} {}".format(nc_path, cdl_path) + subprocess.check_call(command, shell=True) + + # Simulate the inner part of the file reading process. + cf = CFReader(nc_path) + # Grab a data variable : FOR NOW always grab the 'phenom' variable. + cf_var = cf.cf_group.data_variables["phenom"] + + engine = iris.fileformats.netcdf._actions_engine() + + # If debug enabled, switch on the activation summary debug output. + # Use 'patch' so it is restored after the test. + self.patch("iris.fileformats.netcdf.DEBUG", self.debug) + + # Call the main translation function to load a single cube. + # _load_cube establishes per-cube facts, activates rules and + # produces an actual cube. + cube = _load_cube(engine, cf, cf_var, nc_path) + + # Also Record, on the cubes, which hybrid coord elements were identified + # by the rules operation. + # Unlike the other translations, _load_cube does *not* convert this + # information into actual cube elements. That is instead done by + # `iris.fileformats.netcdf._load_aux_factory`. + # For rules testing, it is anyway more convenient to deal with the raw + # data, as each factory type has different validity requirements to + # build it, and none of that is relevant to the rules operation. + cube._formula_type_name = engine.requires.get("formula_type") + cube._formula_terms_byname = engine.requires.get("formula_terms") + + # Always returns a single cube. + return cube + + def run_testcase(self, warning=None, **testcase_kwargs): + """ + Run a testcase with chosen options, returning a test cube. + + The kwargs apply to the '_make_testcase_cdl' method. + + """ + cdl_path = str(self.temp_dirpath / "test.cdl") + nc_path = cdl_path.replace(".cdl", ".nc") + + cdl_string = self._make_testcase_cdl(**testcase_kwargs) + if self.debug: + print("CDL file content:") + print(cdl_string) + print("------\n") + + if warning is None: + context = self.assertNoWarningsRegexp() + else: + context = self.assertWarnsRegexp(warning) + with context: + cube = self.load_cube_from_cdl(cdl_string, cdl_path, nc_path) + + if self.debug: + print("\nCube:") + print(cube) + print("") + return cube + + def _make_testcase_cdl(self, **kwargs): + """Make a testcase CDL string.""" + # Override for specific uses... + raise NotImplementedError() + + def check_result(self, cube, **kwargs): + """Test a result cube.""" + # Override for specific uses... + raise NotImplementedError() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py new file mode 100644 index 0000000000..a2ecdf1490 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py @@ -0,0 +1,908 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the engine.activate() call within the +`iris.fileformats.netcdf._load_cube` function. 
+
+Here, *specifically* testcases relating to grid-mappings and dim-coords.
+
+"""
+import iris.tests as tests  # isort: skip
+
+import iris.coord_systems as ics
+import iris.fileformats._nc_load_rules.helpers as hh
+from iris.tests.unit.fileformats.nc_load_rules.actions import (
+    Mixin__nc_load_actions,
+)
+
+
+class Mixin__grid_mapping(Mixin__nc_load_actions):
+    # Testcase support routines for testing translation of grid-mappings
+    def _make_testcase_cdl(
+        self,
+        latitude_units=None,
+        gridmapvar_name=None,
+        gridmapvar_mappropertyname=None,
+        mapping_missingradius=False,
+        mapping_type_name=None,
+        mapping_scalefactor=None,
+        yco_values=None,
+        xco_name=None,
+        yco_name=None,
+        xco_units=None,
+        yco_units=None,
+        xco_is_dim=True,
+        yco_is_dim=True,
+    ):
+        """
+        Create a CDL string for a testcase.
+
+        This is the "master" routine for creating all our testcases.
+        Kwarg options modify a simple default testcase with a latlon grid.
+        The routine handles the various testcase options and their possible
+        interactions.  This includes knowing what extra changes are required
+        to support different grid-mapping types (for example).
+
+        """
+        # The grid-mapping options are standard-latlon, rotated, or non-latlon.
+        # This affects names+units of the X and Y coords.
+        # We don't have an option to *not* include a grid-mapping variable, but
+        # we can mimic a missing grid-mapping by changing the varname from that
+        # which the data-variable refers to, with "gridmapvar_name=xxx".
+        # Likewise, an invalid (unrecognised) grid-mapping can be mimicked by
+        # selecting an unknown 'grid_mapping_name' property, with
+        # "gridmapvar_mappropertyname=xxx".
+        if mapping_type_name is None:
+            # Default grid-mapping and coords are standard lat-lon.
+            mapping_type_name = hh.CF_GRID_MAPPING_LAT_LON
+            xco_name_default = hh.CF_VALUE_STD_NAME_LON
+            yco_name_default = hh.CF_VALUE_STD_NAME_LAT
+            xco_units_default = "degrees_east"
+            # Special kwarg overrides some of the values.
+            if latitude_units is None:
+                yco_units_default = "degrees_north"
+            else:
+                # Override the latitude units (to invalidate).
+                yco_units_default = latitude_units
+
+        elif mapping_type_name == hh.CF_GRID_MAPPING_ROTATED_LAT_LON:
+            # Rotated lat-lon coordinates.
+            xco_name_default = hh.CF_VALUE_STD_NAME_GRID_LON
+            yco_name_default = hh.CF_VALUE_STD_NAME_GRID_LAT
+            xco_units_default = "degrees"
+            yco_units_default = "degrees"
+
+        else:
+            # General non-latlon coordinates
+            # Exactly which depends on the grid_mapping name.
+            xco_name_default = hh.CF_VALUE_STD_NAME_PROJ_X
+            yco_name_default = hh.CF_VALUE_STD_NAME_PROJ_Y
+            xco_units_default = "m"
+            yco_units_default = "m"
+
+        # Options can override coord (standard) names and units.
+ if xco_name is None: + xco_name = xco_name_default + if yco_name is None: + yco_name = yco_name_default + if xco_units is None: + xco_units = xco_units_default + if yco_units is None: + yco_units = yco_units_default + + phenom_auxcoord_names = [] + if xco_is_dim: + # xdim has same name as xco, making xco a dim-coord + xdim_name = "xco" + else: + # use alternate dim-name, and put xco on the 'coords' list + # This makes the X coord an aux-coord + xdim_name = "xdim_altname" + phenom_auxcoord_names.append("xco") + if yco_is_dim: + # ydim has same name as yco, making yco a dim-coord + ydim_name = "yco" # This makes the Y coord a dim-coord + else: + # use alternate dim-name, and put yco on the 'coords' list + # This makes the Y coord an aux-coord + ydim_name = "ydim_altname" + phenom_auxcoord_names.append("yco") + + # Build a 'phenom:coords' string if needed. + if phenom_auxcoord_names: + phenom_coords_string = " ".join(phenom_auxcoord_names) + phenom_coords_string = f""" + phenom:coordinates = "{phenom_coords_string}" ; +""" + else: + phenom_coords_string = "" + + grid_mapping_name = "grid" + # Options can override the gridvar name and properties. + g_varname = gridmapvar_name + g_mapname = gridmapvar_mappropertyname + if g_varname is None: + g_varname = grid_mapping_name + if g_mapname is None: + # If you change this, it is no longer a valid grid-mapping var. + g_mapname = "grid_mapping_name" + + # Omit the earth radius, if requested. + if mapping_missingradius: + g_radius_string = "" + else: + g_radius_string = f"{g_varname}:earth_radius = 6.e6 ;" + g_string = f""" + int {g_varname} ; + {g_varname}:{g_mapname} = "{mapping_type_name}"; + {g_radius_string} + """ + + # Add a specified scale-factor, if requested. + if mapping_scalefactor is not None: + # Add a specific scale-factor term to the grid mapping. + # (Non-unity scale is not supported for Mercator/Stereographic). + sfapo_name = hh.CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN + g_string += f""" + {g_varname}:{sfapo_name} = {mapping_scalefactor} ; + """ + + # + # Add various additional (minimal) required properties for different + # grid mapping types. 
+ # + + # Those which require 'latitude of projection origin' + if mapping_type_name in ( + hh.CF_GRID_MAPPING_TRANSVERSE, + hh.CF_GRID_MAPPING_STEREO, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + hh.CF_GRID_MAPPING_VERTICAL, + ): + latpo_name = hh.CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN + g_string += f""" + {g_varname}:{latpo_name} = 0.0 ; + """ + # Those which require 'longitude of projection origin' + if mapping_type_name in ( + hh.CF_GRID_MAPPING_STEREO, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + hh.CF_GRID_MAPPING_VERTICAL, + ): + lonpo_name = hh.CF_ATTR_GRID_LON_OF_PROJ_ORIGIN + g_string += f""" + {g_varname}:{lonpo_name} = 0.0 ; + """ + # Those which require 'longitude of central meridian' + if mapping_type_name in (hh.CF_GRID_MAPPING_TRANSVERSE,): + latcm_name = hh.CF_ATTR_GRID_LON_OF_CENT_MERIDIAN + g_string += f""" + {g_varname}:{latcm_name} = 0.0 ; + """ + # Those which require 'perspective point height' + if mapping_type_name in ( + hh.CF_GRID_MAPPING_VERTICAL, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + ): + pph_name = hh.CF_ATTR_GRID_PERSPECTIVE_HEIGHT + g_string += f""" + {g_varname}:{pph_name} = 600000.0 ; + """ + # Those which require 'sweep angle axis' + if mapping_type_name in (hh.CF_GRID_MAPPING_GEOSTATIONARY,): + saa_name = hh.CF_ATTR_GRID_SWEEP_ANGLE_AXIS + g_string += f""" + {g_varname}:{saa_name} = "y" ; + """ + + # y-coord values + if yco_values is None: + yco_values = [10.0, 20.0] + yco_value_strings = [str(val) for val in yco_values] + yco_values_string = ", ".join(yco_value_strings) + + # Construct the total CDL string + cdl_string = f""" + netcdf test {{ + dimensions: + {ydim_name} = 2 ; + {xdim_name} = 3 ; + variables: + double phenom({ydim_name}, {xdim_name}) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; + phenom:grid_mapping = "grid" ; +{phenom_coords_string} + double yco({ydim_name}) ; + yco:axis = "Y" ; + yco:units = "{yco_units}" ; + yco:standard_name = "{yco_name}" ; + double xco({xdim_name}) ; + xco:axis = "X" ; + xco:units = "{xco_units}" ; + xco:standard_name = "{xco_name}" ; + {g_string} + data: + yco = {yco_values_string} ; + xco = 100., 110., 120. ; + }} + """ + return cdl_string + + def check_result( + self, + cube, + cube_cstype=None, + cube_no_cs=False, + cube_no_xycoords=False, + xco_no_cs=False, # N.B. no effect if cube_no_cs is True + yco_no_cs=False, # N.B. no effect if cube_no_cs is True + xco_is_aux=False, + yco_is_aux=False, + xco_stdname=True, + yco_stdname=True, + ): + """ + Check key properties of a result cube. + + Various options control the expected things which are tested. 
+ """ + self.assertEqual(cube.standard_name, "air_temperature") + self.assertEqual(cube.var_name, "phenom") + + x_coords = cube.coords(dimensions=(1,)) + y_coords = cube.coords(dimensions=(0,)) + expected_dim_coords = [] + expected_aux_coords = [] + if yco_is_aux: + expected_aux_coords += y_coords + else: + expected_dim_coords += y_coords + if xco_is_aux: + expected_aux_coords += x_coords + else: + expected_dim_coords += x_coords + + self.assertEqual( + set(expected_dim_coords), set(cube.coords(dim_coords=True)) + ) + if cube_no_xycoords: + self.assertEqual(expected_dim_coords, []) + x_coord = None + y_coord = None + else: + self.assertEqual(len(x_coords), 1) + (x_coord,) = x_coords + self.assertEqual(len(y_coords), 1) + (y_coord,) = y_coords + + self.assertEqual( + set(expected_aux_coords), set(cube.coords(dim_coords=False)) + ) + + if x_coord: + if xco_stdname is None: + # no check + pass + elif xco_stdname is True: + self.assertIsNotNone(x_coord.standard_name) + elif xco_stdname is False: + self.assertIsNone(x_coord.standard_name) + else: + self.assertEqual(x_coord.standard_name, xco_stdname) + + if y_coord: + if yco_stdname is None: + # no check + pass + if yco_stdname is True: + self.assertIsNotNone(y_coord.standard_name) + elif yco_stdname is False: + self.assertIsNone(y_coord.standard_name) + else: + self.assertEqual(y_coord.standard_name, yco_stdname) + + cube_cs = cube.coord_system() + if cube_no_xycoords: + yco_cs = None + xco_cs = None + else: + yco_cs = y_coord.coord_system + xco_cs = x_coord.coord_system + if cube_no_cs: + self.assertIsNone(cube_cs) + self.assertIsNone(yco_cs) + self.assertIsNone(xco_cs) + else: + if cube_cstype is not None: + self.assertIsInstance(cube_cs, cube_cstype) + if xco_no_cs: + self.assertIsNone(xco_cs) + else: + self.assertEqual(xco_cs, cube_cs) + if yco_no_cs: + self.assertIsNone(yco_cs) + else: + self.assertEqual(yco_cs, cube_cs) + + +class Test__grid_mapping(Mixin__grid_mapping, tests.IrisTest): + # Various testcases for translation of grid-mappings + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + def test_basic_latlon(self): + # A basic reference example with a lat-long grid. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_provides_coordinate_(latitude) + # 004 : fc_provides_coordinate_(longitude) + # 005 : fc_build_coordinate_(latitude) + # 006 : fc_build_coordinate_(longitude) + # Notes: + # * grid-mapping identified : regular latlon + # * dim-coords identified : lat+lon + # * coords built : standard latlon (with latlon coord-system) + result = self.run_testcase() + self.check_result(result) + + def test_missing_latlon_radius(self): + # Lat-long with a missing earth-radius causes an error. + # One of very few cases where activation may encounter an error. + # N.B. doesn't really test rules-activation, but maybe worth doing. + # (no rules trigger) + with self.assertRaisesRegex(ValueError, "No ellipsoid"): + self.run_testcase(mapping_missingradius=True) + + def test_bad_gridmapping_nameproperty(self): + # Fix the 'grid' var so it does not register as a grid-mapping. 
+ # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping --FAILED(no grid-mapping attr) + # 003 : fc_provides_coordinate_(latitude) + # 004 : fc_provides_coordinate_(longitude) + # 005 : fc_build_coordinate_(latitude)(no-cs) + # 006 : fc_build_coordinate_(longitude)(no-cs) + # Notes: + # * grid-mapping identified : NONE (thus, no coord-system) + # * dim-coords identified : lat+lon + # * coords built : lat+lon coords, with NO coord-system + result = self.run_testcase(gridmapvar_mappropertyname="mappy") + self.check_result(result, cube_no_cs=True) + + def test_latlon_bad_gridmapping_varname(self): + # rename the grid-mapping variable so it is effectively 'missing' + # (I.E. the var named in "data-variable:grid_mapping" does not exist). + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_(latitude) + # 003 : fc_provides_coordinate_(longitude) + # 004 : fc_build_coordinate_(latitude)(no-cs) + # 005 : fc_build_coordinate_(longitude)(no-cs) + # Notes: + # * behaviours all the same as 'test_bad_gridmapping_nameproperty' + warning = "Missing.*grid mapping variable 'grid'" + result = self.run_testcase(warning=warning, gridmapvar_name="grid_2") + self.check_result(result, cube_no_cs=True) + + def test_latlon_bad_latlon_unit(self): + # Check with bad latitude units : 'degrees' in place of 'degrees_north'. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_default_coordinate_(provide-phase) + # 004 : fc_provides_coordinate_(longitude) + # 005 : fc_build_coordinate_(miscellaneous) + # 006 : fc_build_coordinate_(longitude) + # Notes: + # * grid-mapping identified : regular latlon + # * dim-coords identified : + # x is regular longitude dim-coord + # y is 'default' coord ==> builds as an 'extra' dim-coord + # * coords built : + # x(lon) is regular latlon with coord-system + # y(lat) is a dim-coord, but with NO coord-system + # * additional : + # "fc_provides_coordinate_latitude" did not trigger, + # because it is not a valid latitude coordinate. + result = self.run_testcase(latitude_units="degrees") + self.check_result(result, yco_no_cs=True) + + def test_mapping_rotated(self): + # Test with rotated-latlon grid-mapping + # Distinct from both regular-latlon and non-latlon cases, as the + # coordinate standard names and units are different. + # ('_make_testcase_cdl' and 'check_result' know how to handle that). + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(rotated_latitude_longitude) + # 003 : fc_provides_coordinate_(rotated_latitude) + # 004 : fc_provides_coordinate_(rotated_longitude) + # 005 : fc_build_coordinate_(rotated_latitude)(rotated) + # 006 : fc_build_coordinate_(rotated_longitude)(rotated) + # Notes: + # * grid-mapping identified : rotated lat-lon + # * dim-coords identified : lat+lon + # * coords built: lat+lon coords ROTATED, with coord-system + # - "rotated" means that they have a different name + units + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON + ) + self.check_result(result, cube_cstype=ics.RotatedGeogCS) + + # + # All non-latlon coordinate systems ... + # These all have projection-x/y coordinates with units of metres. + # They all work the same way, except that Mercator/Stereographic have + # parameter checking routines that can fail. + # NOTE: various mapping types *require* certain addtional properties + # - without which an error will occur during translation. 
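+    #   (as an illustrative example drawn from the CDL builder above : a
+    #   geostationary mapping gets latitude/longitude_of_projection_origin,
+    #   perspective_point_height and sweep_angle_axis attributes added to
+    #   the 'grid' variable)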
+ # - run_testcase/_make_testcase_cdl know how to provide these + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_() + # 003 : fc_provides_coordinate_(projection_y) + # 004 : fc_provides_coordinate_(projection_x) + # 005 : fc_build_coordinate_(projection_y) + # 006 : fc_build_coordinate_(projection_x) + # Notes: + # * grid-mapping identified : + # * dim-coords identified : projection__coordinate + # * coords built : projection__coordinate, with coord-system + def test_mapping_albers(self): + result = self.run_testcase(mapping_type_name=hh.CF_GRID_MAPPING_ALBERS) + self.check_result(result, cube_cstype=ics.AlbersEqualArea) + + def test_mapping_geostationary(self): + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_GEOSTATIONARY + ) + self.check_result(result, cube_cstype=ics.Geostationary) + + def test_mapping_lambert_azimuthal(self): + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_LAMBERT_AZIMUTHAL + ) + self.check_result(result, cube_cstype=ics.LambertAzimuthalEqualArea) + + def test_mapping_lambert_conformal(self): + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_LAMBERT_CONFORMAL + ) + self.check_result(result, cube_cstype=ics.LambertConformal) + + def test_mapping_mercator(self): + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_MERCATOR + ) + self.check_result(result, cube_cstype=ics.Mercator) + + def test_mapping_mercator__fail_unsupported(self): + # Provide a mercator grid-mapping with a non-unity scale factor, which + # we cannot handle. + # Result : fails to convert into a coord-system, and emits a warning. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(mercator) --(FAILED check has_supported_mercator_parameters) + # 003 : fc_provides_coordinate_(projection_y) + # 004 : fc_provides_coordinate_(projection_x) + # 005 : fc_build_coordinate_(projection_y)(FAILED projected coord with non-projected cs) + # 006 : fc_build_coordinate_(projection_x)(FAILED projected coord with non-projected cs) + # Notes: + # * grid-mapping identified : NONE + # * dim-coords identified : proj-x and -y + # * coords built : NONE (no dim or aux coords: cube has no coords) + warning = "not yet supported for Mercator" + result = self.run_testcase( + warning=warning, + mapping_type_name=hh.CF_GRID_MAPPING_MERCATOR, + mapping_scalefactor=2.0, + ) + self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) + + def test_mapping_stereographic(self): + result = self.run_testcase(mapping_type_name=hh.CF_GRID_MAPPING_STEREO) + self.check_result(result, cube_cstype=ics.Stereographic) + + def test_mapping_stereographic__fail_unsupported(self): + # As for 'test_mapping_mercator__fail_unsupported', provide a non-unity + # scale factor, which we cannot handle. + # Result : fails to convert into a coord-system, and emits a warning. 
+ # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(stereographic) --(FAILED check has_supported_stereographic_parameters) + # 003 : fc_provides_coordinate_(projection_y) + # 004 : fc_provides_coordinate_(projection_x) + # 005 : fc_build_coordinate_(projection_y)(FAILED projected coord with non-projected cs) + # 006 : fc_build_coordinate_(projection_x)(FAILED projected coord with non-projected cs) + # Notes: + # as for 'mercator__fail_unsupported', above + warning = "not yet supported for stereographic" + result = self.run_testcase( + warning=warning, + mapping_type_name=hh.CF_GRID_MAPPING_STEREO, + mapping_scalefactor=2.0, + ) + self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) + + def test_mapping_transverse_mercator(self): + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_TRANSVERSE + ) + self.check_result(result, cube_cstype=ics.TransverseMercator) + + def test_mapping_vertical_perspective(self): + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_VERTICAL + ) + self.check_result(result, cube_cstype=ics.VerticalPerspective) + + def test_mapping_unsupported(self): + # Use azimuthal, which is a real thing but we don't yet support it. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping --FAILED(unhandled type azimuthal_equidistant) + # 003 : fc_provides_coordinate_(projection_y) + # 004 : fc_provides_coordinate_(projection_x) + # 005 : fc_build_coordinate_(projection_y)(FAILED projected coord with non-projected cs) + # 006 : fc_build_coordinate_(projection_x)(FAILED projected coord with non-projected cs) + # Notes: + # * NO grid-mapping is identified (or coord-system built) + # * There is no warning for this : it fails silently. + # TODO: perhaps there _should_ be a warning in such cases ? + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_AZIMUTHAL + ) + self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) + + def test_mapping_undefined(self): + # Use a random, unknown "mapping type". + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping --FAILED(unhandled type unknown) + # 003 : fc_provides_coordinate_(projection_y) + # 004 : fc_provides_coordinate_(projection_x) + # 005 : fc_build_coordinate_(projection_y)(FAILED projected coord with non-projected cs) + # 006 : fc_build_coordinate_(projection_x)(FAILED projected coord with non-projected cs) + # Notes: + # * There is no warning for this : it fails silently. + # TODO: perhaps there _should_ be a warning in such cases ? + result = self.run_testcase(mapping_type_name="unknown") + self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) + + # + # Cases where names(+units) of coords don't match the grid-mapping type. + # Effectively, there are 9 possibilities for (latlon/rotated/projected) + # coords mismatched to (latlon/rotated/projected/missing) coord-systems. + # + # N.B. the results are not all the same : + # + # 1. when a coord and the grid-mapping have the same 'type', + # i.e. plain-latlon, rotated-latlon or non-latlon, then dim-coords are + # built with a coord-system (as seen previously). + # 2. when there is no grid-mapping, we can build coords of any type, + # but with no coord-system. + # 3. when one of (coord + grid-mapping) is plain-latlon or rotated-latlon, + # and the other is non-latlon (i.e. any other type), + # then we build coords *without* a coord-system + # 4. 
when one of (coord + grid-mapping) is plain-latlon, and the other is + # rotated-latlon, we don't build coords at all. + # TODO: it's not clear why this needs to behave differently from case + # (3.) : possibly, these two should be made consistent. + # + # TODO: *all* these 'mismatch' cases should probably generate warnings, + # except for plain-latlon coords with no grid-mapping. + # At present, we _only_ warn when an expected grid-mapping is absent. + # + + def test_mapping__mismatch__latlon_coords_rotated_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(rotated_latitude_longitude) + # 003 : fc_provides_coordinate_(latitude) + # 004 : fc_provides_coordinate_(longitude) + # 005 : fc_build_coordinate_(latitude)(FAILED : latlon coord with rotated cs) + # 006 : fc_build_coordinate_(longitude)(FAILED : latlon coord with rotated cs) + # Notes: + # * coords built : NONE (see above) + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, + xco_name="longitude", + xco_units="degrees_east", + yco_name="latitude", + yco_units="degrees_north", + ) + self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) + + def test_mapping__mismatch__latlon_coords_nonll_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(albers_conical_equal_area) + # 003 : fc_provides_coordinate_(latitude) + # 004 : fc_provides_coordinate_(longitude) + # 005 : fc_build_coordinate_(latitude)(no-cs : discarded projected cs) + # 006 : fc_build_coordinate_(longitude)(no-cs : discarded projected cs) + # Notes: + # * coords built : lat + lon, with no coord-system (see above) + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_ALBERS, + xco_name="longitude", + xco_units="degrees_east", + yco_name="latitude", + yco_units="degrees_north", + ) + self.check_result(result, cube_no_cs=True) + + def test_mapping__mismatch__latlon_coords_missing_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_(latitude) + # 003 : fc_provides_coordinate_(longitude) + # 004 : fc_build_coordinate_(latitude)(no-cs) + # 005 : fc_build_coordinate_(longitude)(no-cs) + # Notes: + # * coords built : lat + lon, with no coord-system (see above) + warning = "Missing.*grid mapping variable 'grid'" + result = self.run_testcase( + warning=warning, + gridmapvar_name="moved", + xco_name="longitude", + xco_units="degrees_east", + yco_name="latitude", + yco_units="degrees_north", + ) + self.check_result(result, cube_no_cs=True) + + def test_mapping__mismatch__rotated_coords_latlon_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_provides_coordinate_(rotated_latitude) + # 004 : fc_provides_coordinate_(rotated_longitude) + # 005 : fc_build_coordinate_(rotated_latitude)(FAILED rotated coord with latlon cs) + # 006 : fc_build_coordinate_(rotated_longitude)(FAILED rotated coord with latlon cs) + # Notes: + # * coords built : NONE (see above) + result = self.run_testcase( + xco_name="grid_longitude", + xco_units="degrees", + yco_name="grid_latitude", + yco_units="degrees", + ) + self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) + + def test_mapping__mismatch__rotated_coords_nonll_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(albers_conical_equal_area) + # 003 : fc_provides_coordinate_(rotated_latitude) + # 004 : fc_provides_coordinate_(rotated_longitude) + # 005 : 
fc_build_coordinate_(rotated_latitude)(rotated no-cs : discarded projected cs) + # 006 : fc_build_coordinate_(rotated_longitude)(rotated no-cs : discarded projected cs) + # Notes: + # * coords built : rotated-lat + lon, with no coord-system (see above) + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_ALBERS, + xco_name="grid_longitude", + xco_units="degrees", + yco_name="grid_latitude", + yco_units="degrees", + ) + self.check_result(result, cube_no_cs=True) + + def test_mapping__mismatch__rotated_coords_missing_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_(rotated_latitude) + # 003 : fc_provides_coordinate_(rotated_longitude) + # 004 : fc_build_coordinate_(rotated_latitude)(rotated no-cs) + # 005 : fc_build_coordinate_(rotated_longitude)(rotated no-cs) + # Notes: + # * coords built : rotated lat + lon, with no coord-system (see above) + warning = "Missing.*grid mapping variable 'grid'" + result = self.run_testcase( + warning=warning, + gridmapvar_name="moved", + xco_name="grid_longitude", + xco_units="degrees", + yco_name="grid_latitude", + yco_units="degrees", + ) + self.check_result(result, cube_no_cs=True) + + def test_mapping__mismatch__nonll_coords_latlon_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_default_coordinate_(provide-phase) + # 004 : fc_default_coordinate_(provide-phase) + # 005 : fc_build_coordinate_(miscellaneous) + # 006 : fc_build_coordinate_(miscellaneous) + # Notes: + # * coords built : projection x + y, with no coord-system (see above) + # * the coords build as "default" type : they have no standard-name + result = self.run_testcase( + xco_name="projection_x", + xco_units="m", + yco_name="projection_y", + yco_units="m", + ) + self.check_result( + result, cube_no_cs=True, xco_stdname=False, yco_stdname=False + ) + + def test_mapping__mismatch__nonll_coords_rotated_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(rotated_latitude_longitude) + # 003 : fc_default_coordinate_(provide-phase) + # 004 : fc_default_coordinate_(provide-phase) + # 005 : fc_build_coordinate_(miscellaneous) + # 006 : fc_build_coordinate_(miscellaneous) + # Notes: + # * as previous case '__mismatch__nonll_' + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, + xco_name="projection_x", + xco_units="m", + yco_name="projection_y", + yco_units="m", + ) + self.check_result( + result, cube_no_cs=True, xco_stdname=False, yco_stdname=False + ) + + def test_mapping__mismatch__nonll_coords_missing_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_default_coordinate_(provide-phase) + # 003 : fc_default_coordinate_(provide-phase) + # 004 : fc_build_coordinate_(miscellaneous) + # 005 : fc_build_coordinate_(miscellaneous) + # Notes: + # * effectively, just like previous 2 cases + warning = "Missing.*grid mapping variable 'grid'" + result = self.run_testcase( + warning=warning, + gridmapvar_name="moved", + xco_name="projection_x", + xco_units="m", + yco_name="projection_y", + yco_units="m", + ) + self.check_result( + result, cube_no_cs=True, xco_stdname=False, yco_stdname=False + ) + + +class Test__aux_latlons(Mixin__grid_mapping, tests.IrisTest): + # Testcases for translating auxiliary latitude+longitude variables + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + def test_aux_lon(self): + # 
Change the name of xdim, and put xco on the coords list. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_provides_coordinate_(latitude) + # 004 : fc_build_coordinate_(latitude) + # 005 : fc_build_auxiliary_coordinate_longitude + result = self.run_testcase(xco_is_dim=False) + self.check_result(result, xco_is_aux=True, xco_no_cs=True) + + def test_aux_lat(self): + # As previous, but with the Y coord. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_provides_coordinate_(longitude) + # 004 : fc_build_coordinate_(longitude) + # 005 : fc_build_auxiliary_coordinate_latitude + result = self.run_testcase(yco_is_dim=False) + self.check_result(result, yco_is_aux=True, yco_no_cs=True) + + def test_aux_lat_and_lon(self): + # Make *both* X and Y coords into aux-coords. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_build_auxiliary_coordinate_longitude + # 004 : fc_build_auxiliary_coordinate_latitude + # Notes: + # * a grid-mapping is recognised, but discarded, as in this case + # there are no dim-coords to reference it. + result = self.run_testcase(xco_is_dim=False, yco_is_dim=False) + self.check_result( + result, xco_is_aux=True, yco_is_aux=True, cube_no_cs=True + ) + + def test_aux_lon_rotated(self): + # Rotated-style lat + lon coords, X is an aux-coord. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(rotated_latitude_longitude) + # 003 : fc_provides_coordinate_(rotated_latitude) + # 004 : fc_build_coordinate_(rotated_latitude)(rotated) + # 005 : fc_build_auxiliary_coordinate_longitude_rotated + # Notes: + # * as the plain-latlon case 'test_aux_lon'. + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, + xco_is_dim=False, + ) + self.check_result(result, xco_is_aux=True, xco_no_cs=True) + + def test_aux_lat_rotated(self): + # Rotated-style lat + lon coords, Y is an aux-coord. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(rotated_latitude_longitude) + # 003 : fc_provides_coordinate_(rotated_longitude) + # 004 : fc_build_coordinate_(rotated_longitude)(rotated) + # 005 : fc_build_auxiliary_coordinate_latitude_rotated + # Notes: + # * as the plain-latlon case 'test_aux_lat'. + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, + yco_is_dim=False, + ) + self.check_result(result, yco_is_aux=True, yco_no_cs=True) + + +class Test__nondimcoords(Mixin__grid_mapping, tests.IrisTest): + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + def test_nondim_lats(self): + # Fix a coord's values so it cannot be a dim-coord. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_provides_coordinate_(latitude) + # 004 : fc_provides_coordinate_(longitude) + # 005 : fc_build_coordinate_(latitude) + # 006 : fc_build_coordinate_(longitude) + # Notes: + # * in terms of rule triggering, this is not distinct from the + # "normal" case : but latitude is now created as an aux-coord. 
+ warning = "must be.* monotonic" + result = self.run_testcase(warning=warning, yco_values=[0.0, 0.0]) + self.check_result(result, yco_is_aux=True) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py new file mode 100644 index 0000000000..f9a11ba403 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py @@ -0,0 +1,281 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the engine.activate() call within the +`iris.fileformats.netcdf._load_cube` function. + +Test rules activation relating to hybrid vertical coordinates. + +""" +import iris.tests as tests # isort: skip + +import iris.fileformats._nc_load_rules.helpers as hh +from iris.tests.unit.fileformats.nc_load_rules.actions import ( + Mixin__nc_load_actions, +) + + +class Test__formulae_tests(Mixin__nc_load_actions, tests.IrisTest): + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + def _make_testcase_cdl( + self, formula_root_name=None, term_names=None, extra_formula_type=None + ): + """Construct a testcase CDL for data with hybrid vertical coords.""" + if formula_root_name is None: + formula_root_name = "atmosphere_hybrid_height_coordinate" + if term_names is None: + term_names = hh.CF_COORD_VERTICAL.get(formula_root_name) + if term_names is None: + # unsupported type : just make something up + term_names = ["term1"] + + # Arrange to create additional term variables for an 'extra' hybrid + # formula, if requested. + if extra_formula_type is None: + term_names_extra = [] + phenom_coord_names = ["vert"] # always include the root variable + else: + phenom_coord_names = ["vert", "vert_2"] # two formula coords + term_names_extra = hh.CF_COORD_VERTICAL.get(extra_formula_type) + + # Build strings to define term variables. + formula_term_strings = [] + extra_formula_term_strings = [] + terms_string = "" + for term_name in term_names + term_names_extra: + term_varname = "v_" + term_name + # Include in the phenom coordinates list. + phenom_coord_names.append(term_varname) + term_string = f"{term_name}: {term_varname}" + if term_name in term_names: + # Include in the 'main' terms list. + formula_term_strings.append(term_string) + else: + # Include in the 'extra' terms list. + extra_formula_term_strings.append(term_string) + terms_string += f""" + double {term_varname}(h) ; + {term_varname}:long_name = "{term_name}_long_name" ; + {term_varname}:units = "m" ; +""" + + # Construct the reference strings. + phenom_coords_string = " ".join(phenom_coord_names) + formula_terms_string = " ".join(formula_term_strings) + extra_formula_terms_string = " ".join(extra_formula_term_strings) + + # Construct the 'extra' hybrid coord if requested. + if extra_formula_type is None: + extra_formula_string = "" + else: + # Create the lines to add an 'extra' formula. + # For now, put this on the same dim : makes no difference. + extra_formula_string = f""" + double vert_2(h) ; + vert_2:standard_name = "{extra_formula_type}" ; + vert_2:units = "m" ; + vert_2:formula_terms = "{extra_formula_terms_string}" ; +""" + + # Create the main result string. 
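+        # As an illustrative sketch only : for the default hybrid-height case
+        # the strings built above come out roughly as
+        #     phenom_coords_string : "vert v_a v_b v_orog"
+        #     formula_terms_string : "a: v_a b: v_b orog: v_orog"
+        # i.e. each "term: variable" pair names one of the term variables
+        # defined in 'terms_string'.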
+ cdl_str = f""" +netcdf test {{ +dimensions: + h = 2 ; +variables: + double phenom(h) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; + phenom:coordinates = "{phenom_coords_string}" ; + double vert(h) ; + vert:standard_name = "{formula_root_name}" ; + vert:long_name = "hybrid_vertical" ; + vert:units = "m" ; + vert:formula_terms = "{formula_terms_string}" ; +{terms_string} +{extra_formula_string} +}} +""" + return cdl_str + + def check_result(self, cube, factory_type="_auto", formula_terms="_auto"): + """Check the result of a cube load with a hybrid vertical coord.""" + if factory_type == "_auto": + # replace with our 'default', which is hybrid-height. + # N.B. 'None' is different: it means expect *no* factory. + factory_type = "atmosphere_hybrid_height_coordinate" + self.assertEqual(cube._formula_type_name, factory_type) + + if formula_terms == "_auto": + # Set default terms-expected, according to the expected factory + # type. + if factory_type is None: + # If no factory, expect no identified terms. + formula_terms = [] + else: + # Expect the correct ones defined for the factory type. + formula_terms = hh.CF_COORD_VERTICAL[factory_type] + + # Compare the formula_terms list with the 'expected' ones. + # N.B. first make the 'expected' list lower case, as the lists in + # hh.CF_COORD_VERTICAL include uppercase, but rules outputs don't. + formula_terms = [term.lower() for term in formula_terms] + + # N.B. the terms dictionary can be missing, if there were none + actual_terms = cube._formula_terms_byname or {} + self.assertEqual(sorted(formula_terms), sorted(actual_terms.keys())) + + # Check that there is an aux-coord of the expected name for each term + for var_name in actual_terms.values(): + coords = cube.coords(var_name=var_name, dim_coords=False) + self.assertEqual(len(coords), 1) + + # + # Actual testcase routines + # + + def test_basic_hybridheight(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_build_auxiliary_coordinate + # 003 : fc_build_auxiliary_coordinate + # 004 : fc_build_auxiliary_coordinate + # 005 : fc_build_auxiliary_coordinate + # 008 : fc_formula_type_atmosphere_hybrid_height_coordinate + # 009 : fc_formula_term(a) + # 010 : fc_formula_term(b) + # 011 : fc_formula_term(orog) + result = self.run_testcase() + self.check_result(result) + + def test_missing_term(self): + # Check behaviour when a term is missing. + # For the test, omit "orography", which is common in practice. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_build_auxiliary_coordinate + # 003 : fc_build_auxiliary_coordinate + # 004 : fc_build_auxiliary_coordinate + # 007 : fc_formula_type_atmosphere_hybrid_height_coordinate + # 008 : fc_formula_term(a) + # 009 : fc_formula_term(b) + result = self.run_testcase( + term_names=["a", "b"] # missing the 'orog' term + ) + self.check_result(result, formula_terms=["a", "b"]) + + def test_no_terms(self): + # Check behaviour when *all* terms are missing. + # N.B. for any _actual_ type, this is probably invalid and would fail? + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_build_auxiliary_coordinate + result = self.run_testcase( + formula_root_name="atmosphere_hybrid_height_coordinate", + term_names=[], + ) + # This does *not* trigger + # 'fc_formula_type_atmosphere_hybrid_height_coordinate' + # This is because, within the 'assert_case_specific_facts' routine, + # formula_roots are only recognised by scanning the identified + # formula_terms. 
+ self.check_result(result, factory_type=None) + + def test_unrecognised_verticaltype(self): + # Set the root variable name to something NOT a recognised hybrid type. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_build_auxiliary_coordinate + # 003 : fc_build_auxiliary_coordinate + # 004 : fc_build_auxiliary_coordinate + # 007 : fc_formula_type(FAILED - unrecognised formula type = 'unknown') + # 008 : fc_formula_term(a) + # 009 : fc_formula_term(b) + result = self.run_testcase( + formula_root_name="unknown", + term_names=["a", "b"], + warning="Ignored formula of unrecognised type: 'unknown'.", + ) + # Check that it picks up the terms, but *not* the factory root coord, + # which is simply discarded. + self.check_result(result, factory_type=None, formula_terms=["a", "b"]) + + def test_two_formulae(self): + # Construct an example with TWO hybrid coords. + # This is not errored, but we don't correctly support it. + # + # NOTE: the original Pyke implementation does not detect this problem + # By design, the new mechanism does + will raise a warning. + warning = ( + "Omitting factories for some hybrid coordinates.*" + "multiple hybrid coordinates.* not supported" + ) + + extra_type = "ocean_sigma_coordinate" + result = self.run_testcase( + extra_formula_type=extra_type, warning=warning + ) + # NOTE: FOR NOW, check expected behaviour : only one factory will be + # built, but there are coordinates (terms) for both types. + # TODO: this is a bug and needs fixing : translation should handle + # multiple hybrid coordinates in a sensible way. + self.check_result( + result, + factory_type=extra_type, + formula_terms=["a", "b", "depth", "eta", "orog", "sigma"], + ) + + +# Add in tests methods to exercise each (supported) vertical coordinate type +# individually. +# NOTE: hh.CF_COORD_VERTICAL lists all the valid types, but we don't yet +# support all of them. +_SUPPORTED_FORMULA_TYPES = ( + # NOTE: omit "atmosphere_hybrid_height_coordinate" : our basic testcase + "atmosphere_hybrid_sigma_pressure_coordinate", + "ocean_sigma_z_coordinate", + "ocean_sigma_coordinate", + "ocean_s_coordinate", + "ocean_s_coordinate_g1", + "ocean_s_coordinate_g2", +) +for hybrid_type in _SUPPORTED_FORMULA_TYPES: + + def construct_inner_func(hybrid_type): + term_names = hh.CF_COORD_VERTICAL[hybrid_type] + + def inner(self): + result = self.run_testcase( + formula_root_name=hybrid_type, term_names=term_names + ) + self.check_result( + result, factory_type=hybrid_type, formula_terms=term_names + ) + + return inner + + # Note: use an intermediate function to generate each test method, simply to + # generate a new local variable for 'hybrid_type' on each iteration. + # Otherwise all the test methods will refer to the *same* 'hybrid_type' + # variable, i.e. the loop variable, which does not work ! + method_name = f"test_{hybrid_type}_coord" + setattr( + Test__formulae_tests, method_name, construct_inner_func(hybrid_type) + ) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py new file mode 100644 index 0000000000..4ed90fd79a --- /dev/null +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py @@ -0,0 +1,221 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
+""" +Unit tests for the engine.activate() call within the +`iris.fileformats.netcdf._load_cube` function. + +Tests for rules activation relating to some isolated aspects : + * UKMO um-specific metadata + * label coordinates + * cell measures + * ancillary variables + +""" +import iris.tests as tests # isort: skip + +from iris.coords import AncillaryVariable, AuxCoord, CellMeasure +from iris.fileformats.pp import STASH +from iris.tests.unit.fileformats.nc_load_rules.actions import ( + Mixin__nc_load_actions, +) + + +class Test__ukmo_attributes(Mixin__nc_load_actions, tests.IrisTest): + # Tests for handling of the special UM-specific data-var attributes. + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + def _make_testcase_cdl(self, **add_attrs): + phenom_attrs_string = "" + for key, value in add_attrs.items(): + phenom_attrs_string += f""" + phenom:{key} = "{value}" ; +""" + + cdl_string = f""" +netcdf test {{ + dimensions: + xdim = 2 ; + variables: + double phenom(xdim) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; +{phenom_attrs_string} +}} +""" + return cdl_string + + def check_result(self, cube, stashcode=None, processflags=None): + cube_stashattr = cube.attributes.get("STASH") + cube_processflags = cube.attributes.get("ukmo__process_flags") + + if stashcode is not None: + self.assertIsInstance(cube_stashattr, STASH) + self.assertEqual(str(stashcode), str(cube_stashattr)) + else: + self.assertIsNone(cube_stashattr) + + if processflags is not None: + self.assertIsInstance(cube_processflags, tuple) + self.assertEqual(set(cube_processflags), set(processflags)) + else: + self.assertIsNone(cube_processflags) + + # + # Testcase routines + # + stashcode = "m01s02i034" # Just one valid STASH msi string for testing + + def test_stash(self): + cube = self.run_testcase(um_stash_source=self.stashcode) + self.check_result(cube, stashcode=self.stashcode) + + def test_stash_altname(self): + cube = self.run_testcase(ukmo__um_stash_source=self.stashcode) + self.check_result(cube, stashcode=self.stashcode) + + def test_stash_empty(self): + msg = "Expected STASH code MSI string" + with self.assertRaisesRegex(ValueError, msg): + self.run_testcase(ukmo__um_stash_source="") + + def test_stash_invalid(self): + msg = "Expected STASH code MSI string" + with self.assertRaisesRegex(ValueError, msg): + self.run_testcase(ukmo__um_stash_source="XXX") + + def test_processflags_single(self): + cube = self.run_testcase(ukmo__process_flags="this") + self.check_result(cube, processflags=["this"]) + + def test_processflags_multi_with_underscores(self): + flags_testinput = "this that_1 the_other_one x" + flags_expectresult = ["this", "that 1", "the other one", "x"] + cube = self.run_testcase(ukmo__process_flags=flags_testinput) + self.check_result(cube, processflags=flags_expectresult) + + def test_processflags_empty(self): + cube = self.run_testcase(ukmo__process_flags="") + expected_result = [""] # May seem odd, but that's what it does. + self.check_result(cube, processflags=expected_result) + + +class Test__labels_cellmeasures_ancils(Mixin__nc_load_actions, tests.IrisTest): + # Tests for some simple rules that translate facts directly into cube data, + # with no alternative actions, complications or failure modes to test. 
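+    #
+    # Each case adds just one CF linkage attribute (plus a matching variable)
+    # to the phenomenon in the CDL, roughly :
+    #     phenom:coordinates = "v_label" ;           --> label (string) coord
+    #     phenom:cell_measures = "area: v_cellm" ;   --> cell measure
+    #     phenom:ancillary_variables = "v_ancil" ;   --> ancillary variable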
+ @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + def _make_testcase_cdl( + self, + include_label=False, + include_cellmeasure=False, + include_ancil=False, + ): + + phenom_extra_attrs_string = "" + extra_vars_string = "" + + if include_label: + phenom_extra_attrs_string += """ + phenom:coordinates = "v_label" ; +""" + extra_vars_string += """ + char v_label(xdim, strdim) ; + v_label:long_name = "string data" ; +""" + + if include_cellmeasure: + # One simple case : a valid link + a variable definition. + phenom_extra_attrs_string += """ + phenom:cell_measures = "area: v_cellm" ; +""" + extra_vars_string += """ + double v_cellm(xdim) ; + v_cellm:long_name = "cell areas" ; +""" + + if include_ancil: + # One simple case : a valid link + a variable definition. + phenom_extra_attrs_string += """ + phenom:ancillary_variables = "v_ancil" ; +""" + extra_vars_string += """ + double v_ancil(xdim) ; + v_ancil:long_name = "ancillary values" ; +""" + cdl_string = f""" + netcdf test {{ + dimensions: + xdim = 2 ; + strdim = 5 ; + variables: + double phenom(xdim) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; +{phenom_extra_attrs_string} +{extra_vars_string} + }} + """ + return cdl_string + + def check_result( + self, + cube, + expect_label=False, + expect_cellmeasure=False, + expect_ancil=False, + ): + label_coords = cube.coords(var_name="v_label") + if expect_label: + self.assertEqual(len(label_coords), 1) + (coord,) = label_coords + self.assertIsInstance(coord, AuxCoord) + self.assertEqual(coord.dtype.kind, "U") + else: + self.assertEqual(len(label_coords), 0) + + cell_measures = cube.cell_measures() + if expect_cellmeasure: + self.assertEqual(len(cell_measures), 1) + (cellm,) = cell_measures + self.assertIsInstance(cellm, CellMeasure) + else: + self.assertEqual(len(cell_measures), 0) + + ancils = cube.ancillary_variables() + if expect_ancil: + self.assertEqual(len(ancils), 1) + (ancil,) = ancils + self.assertIsInstance(ancil, AncillaryVariable) + else: + self.assertEqual(len(ancils), 0) + + def test_label(self): + cube = self.run_testcase(include_label=True) + self.check_result(cube, expect_label=True) + + def test_ancil(self): + cube = self.run_testcase(include_ancil=True) + self.check_result(cube, expect_ancil=True) + + def test_cellmeasure(self): + cube = self.run_testcase(include_cellmeasure=True) + self.check_result(cube, expect_cellmeasure=True) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py new file mode 100644 index 0000000000..47760aadcb --- /dev/null +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py @@ -0,0 +1,462 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the engine.activate() call within the +`iris.fileformats.netcdf._load_cube` function. + +Tests for rules activation relating to 'time' and 'time_period' coords. + +""" +import iris.tests as tests # isort: skip + +from iris.coords import AuxCoord, DimCoord +from iris.tests.unit.fileformats.nc_load_rules.actions import ( + Mixin__nc_load_actions, +) + + +class Opts(dict): + # A dict-like thing which provides '.' access in place of indexing. 
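+    #
+    # Illustrative usage (the names here are arbitrary) :
+    #     opts = Opts(which="time", stdname="time")
+    #     opts.which             # --> "time", same as opts["which"]
+    #     opts.stdname = "x"     # also sets opts["stdname"] = "x"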
+ def __init__(self, **kwargs): + # Init like a dict + super().__init__(**kwargs) + # Alias contents "self['key']", as properties "self.key" + # See: https://stackoverflow.com/a/14620633/2615050 + self.__dict__ = self + + +# Per-coord options settings for testcase definitions. +_COORD_OPTIONS_TEMPLATE = { + "which": "", # set to "something" + "stdname": "_auto_which", # default = time / time_period + "varname": "_as_which", # default = time / period + "dimname": "_as_which", + "in_phenomvar_dims": True, + "in_phenomvar_coords": False, # set for an aux-coord + "values_all_zero": False, # set to block CFDimensionVariable identity + "units": "_auto_which", # specific to time/period +} + + +class Mixin__timecoords__common(Mixin__nc_load_actions): + def _make_testcase_cdl( + self, + phenom_dims="_auto", # =get from time+period opts + phenom_coords="_auto", # =get from time+period opts + time_opts=None, + period_opts=None, + timedim_name="time", + perioddim_name="period", + ): + opt_t = None + opt_p = None + if time_opts is not None: + # Convert a non-null kwarg into an options dict for 'time' options + opt_t = Opts(**_COORD_OPTIONS_TEMPLATE) + opt_t.update(which="time", **time_opts) + if period_opts is not None: + # Convert a non-null kwarg into an options dict for 'period' options + opt_p = Opts(**_COORD_OPTIONS_TEMPLATE) + opt_p.update(which="period", **period_opts) + + # Define the 'standard' dimensions which we will create + # NB we don't necessarily *use* either of these + dims_and_lens = {timedim_name: 2, perioddim_name: 3} + dims_string = "\n".join( + [ + f" {name} = {length} ;" + for name, length in dims_and_lens.items() + ] + ) + + phenom_auto_dims = [] + phenom_auto_coords = [] + coord_variables_string = "" + data_string = "" + for opt in (opt_t, opt_p): + # Handle computed defaults and common info for both coord options. + if opt: + if opt.which not in ("time", "period"): + raise ValueError(f"unrecognised opt.which={opt.which}") + + # Do computed defaults. + if opt.stdname == "_auto_which": + if opt.which == "time": + opt.stdname = "time" + else: + assert opt.which == "period" + opt.stdname = "forecast_period" + if opt.varname == "_as_which": + opt.varname = opt.which + if opt.dimname == "_as_which": + opt.dimname = opt.which + if opt.units == "_auto_which": + if opt.which == "time": + opt.units = "hours since 2000-01-01" + else: + assert opt.which == "period" + opt.units = "hours" + + # Build 'auto' lists of phenom dims and (aux) coordinates. + if opt.in_phenomvar_dims: + phenom_auto_dims.append(opt.dimname) + if opt.in_phenomvar_coords: + phenom_auto_coords.append(opt.varname) + + # Add a definition of the coord variable. + coord_variables_string += f""" + double {opt.varname}({opt.dimname}) ; + {opt.varname}:standard_name = "{opt.stdname}" ; + {opt.varname}:units = "{opt.units}" ; +""" + # NOTE: we don't bother with an 'axis' property. + # We can probe the behaviour we need without that, because we + # are *not* testing the cf.py categorisation code, or the + # helper "build_xxx" routines. + + # Define coord-var data values (so it can be a dimension). + varname = opt.varname + if opt.values_all_zero: + # Use 'values_all_zero' to prevent a dim-var from + # identifying as a CFDimensionCoordinate (as it is + # non-monotonic). + dim_vals = [0.0] * dims_and_lens[opt.dimname] + else: + # "otherwise", assign an ascending sequence. 
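+                    # (an ascending sequence keeps the variable monotonic, so
+                    #  it can still be identified as a CFDimensionCoordinate,
+                    #  unlike the all-zero case above)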
+ dim_vals = range(dims_and_lens[opt.dimname]) + dimvals_string = ", ".join(f"{val:0.1f}" for val in dim_vals) + data_string += f"\n {varname} = {dimvals_string} ;" + + if phenom_dims == "_auto": + phenom_dims = phenom_auto_dims + if not phenom_dims: + phenom_dims_string = "" + else: + phenom_dims_string = ", ".join(phenom_dims) + + if phenom_coords == "_auto": + phenom_coords = phenom_auto_coords + if not phenom_coords: + phenom_coords_string = "" + else: + phenom_coords_string = " ".join(phenom_coords) + phenom_coords_string = ( + " " + f'phenom:coordinates = "{phenom_coords_string}" ; ' + ) + + # Create a testcase with time dims + coords. + cdl_string = f""" +netcdf test {{ + dimensions: +{dims_string} + variables: + double phenom({phenom_dims_string}) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; +{phenom_coords_string} + +{coord_variables_string} + data: +{data_string} +}} +""" + return cdl_string + + def check_result(self, cube, time_is="dim", period_is="missing"): + """ + Check presence of expected dim/aux-coords in the result cube. + + Both of 'time_is' and 'period_is' can take values 'dim', 'aux' or + 'missing'. + + """ + options = ("dim", "aux", "missing") + msg = f'Invalid "{{name}}" = {{opt}} : Not one of {options!r}.' + if time_is not in options: + raise ValueError(msg.format(name="time_is", opt=time_is)) + if period_is not in options: + raise ValueError(msg.format(name="period_is", opt=period_is)) + + # Get the facts we want to check + time_name = "time" + period_name = "forecast_period" + time_dimcos = cube.coords(time_name, dim_coords=True) + time_auxcos = cube.coords(time_name, dim_coords=False) + period_dimcos = cube.coords(period_name, dim_coords=True) + period_auxcos = cube.coords(period_name, dim_coords=False) + + if time_is == "dim": + self.assertEqual(len(time_dimcos), 1) + self.assertEqual(len(time_auxcos), 0) + elif time_is == "aux": + self.assertEqual(len(time_dimcos), 0) + self.assertEqual(len(time_auxcos), 1) + else: + self.assertEqual(len(time_dimcos), 0) + self.assertEqual(len(time_auxcos), 0) + + if period_is == "dim": + self.assertEqual(len(period_dimcos), 1) + self.assertEqual(len(period_auxcos), 0) + elif period_is == "aux": + self.assertEqual(len(period_dimcos), 0) + self.assertEqual(len(period_auxcos), 1) + else: + self.assertEqual(len(period_dimcos), 0) + self.assertEqual(len(period_auxcos), 0) + + # Also check expected built Coord types. + if time_is == "dim": + self.assertIsInstance(time_dimcos[0], DimCoord) + elif time_is == "aux": + self.assertIsInstance(time_auxcos[0], AuxCoord) + + if period_is == "dim": + self.assertIsInstance(period_dimcos[0], DimCoord) + elif period_is == "aux": + self.assertIsInstance(period_auxcos[0], AuxCoord) + + +class Mixin__singlecoord__tests(Mixin__timecoords__common): + # Coordinate tests to be run for both 'time' and 'period' coordinate vars. + # Set (in inheritors) to select time/period testing. + which = None + + def run_testcase(self, coord_dim_name=None, **opts): + """ + Specialise 'run_testcase' for single-coord 'time' or 'period' testing. + """ + which = self.which + assert which in ("time", "period") + + # Separate the 'Opt' keywords from "others" : others are passed + # directly to the parent routine, whereas 'Opt' ones are passed to + # 'time_opts' / 'period_opts' keys accordingly. 
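+        # For example, a call like run_testcase(units="1", warning="...") is
+        # expected to split into opts={"units": "1"} (a per-coord option) and
+        # general_opts={"warning": "..."} (passed straight to the parent).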
+ general_opts = {} + for key, value in list(opts.items()): + if key not in _COORD_OPTIONS_TEMPLATE.keys(): + del opts[key] + general_opts[key] = value + + if coord_dim_name is not None: + # Translate this into one of timedim_name/perioddim_name + general_opts[f"{which}dim_name"] = coord_dim_name + + period_opts = None + time_opts = None + if which == "time": + time_opts = opts + else: + period_opts = opts + + result = super().run_testcase( + time_opts=time_opts, period_opts=period_opts, **general_opts + ) + + return result + + def check_result(self, cube, coord_is="dim"): + """ + Specialise 'check_result' for single-coord 'time' or 'period' testing. + """ + # Pass generic 'coord_is' option to parent as time/period options. + which = self.which + assert which in ("time", "period") + + if which == "time": + time_is = coord_is + period_is = "missing" + else: + period_is = coord_is + time_is = "missing" + + super().check_result(cube, time_is=time_is, period_is=period_is) + + # + # Generic single-coordinate testcases. + # ( these are repeated for both 'time' and 'time_period' ) + # + + def test_dimension(self): + # Coord is a normal dimension --> dimcoord + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_(time[[_period]]) + # 003 : fc_build_coordinate_(time[[_period]]) + result = self.run_testcase() + self.check_result(result, "dim") + + def test_dimension_in_phenom_coords(self): + # Dimension coord also present in phenom:coords. + # Strictly wrong but a common error in datafiles : must tolerate. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_(time[[_period]]) + # 003 : fc_build_coordinate_(time[[_period]]) + result = self.run_testcase(in_phenomvar_coords=True) + self.check_result(result, "dim") + + def test_dim_nonmonotonic(self): + # Coord has all-zero values, which prevents it being a dimcoord. + # The rule has a special way of treating it as an aux coord + # -- even though it doesn't appear in the phenom coords. + # ( Done by the build_coord routine, so not really a rules issue). + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_(time[[_period]]) + # 003 : fc_build_coordinate_(time[[_period]]) + msg = "Failed to create.* dimension coordinate" + result = self.run_testcase(values_all_zero=True, warning=msg) + self.check_result(result, "aux") + + def test_dim_fails_typeident(self): + # Provide a coord variable, identified as a CFDimensionCoordinate by + # cf.py, but with the "wrong" units for a time or period coord. + # This causes it to fail both 'is_time' and 'is_period' tests and so, + # within the 'action_provides_coordinate' routine, does not trigger as + # a 'provides_coord_(time[[_period]])' rule, but instead as a + # 'default_coordinate_(provide-phase)'. + # As a result, it is built as a 'miscellaneous' dim-coord. + # N.B. this makes *no* practical difference, because a 'misc' dim + # coord is still a dim coord (albeit one with incorrect units). + # N.B.#2 that is different from lat/lon coords, where the coord-specific + # 'build' rules have the extra effect of setting a fixed standard-name. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_default_coordinate_(provide-phase) + # 003 : fc_build_coordinate_(miscellaneous) + result = self.run_testcase(units="1") + self.check_result(result, "dim") + + def test_aux(self): + # time/period is installed as an auxiliary coord. + # For this, rename both DIMENSIONS, so that the generated coords are + # not actually CF coordinates. 
+ # For a valid case, we must *also* have a ref in phenom:coordinates + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_build_auxiliary_coordinate_time[[_period]] + result = self.run_testcase( + coord_dim_name="dim_renamed", + dimname="dim_renamed", + in_phenomvar_coords=True, + ) + self.check_result(result, "aux") + + def test_aux_not_in_phenom_coords(self): + # time/period is installed as an auxiliary coord, + # but we DIDN'T list it in phenom:coords -- otherwise as previous. + # Should have no result at all. + # + # Rules Triggered: + # 001 : fc_default + result = self.run_testcase( + coord_dim_name="dim_renamed", + dimname="dim_renamed", + in_phenomvar_coords=False, + ) # "should" be True for an aux-coord + self.check_result(result, "missing") + + def test_aux_fails_typeident(self): + # We provide a non-dimension coord variable, identified as a + # CFAuxiliaryCoordinate by cf.py, but we also give it "wrong" units, + # unsuitable for a time or period coord. + # Because it fails both 'is_time' and 'is_period' tests, it then does + # not trigger 'fc_build_auxiliary_coordinate_time[[_period]]'. + # As in the above testcase 'test_dim_fails_typeident', the routine + # 'action_build_auxiliary_coordinate' therefore builds this as a + # 'miscellaneous' rather than a specific coord type (time or period). + # However, also as in that other case, this makes absolutely no + # practical difference -- unlike for latitude or longitutude coords, + # where it may affect the standard-name. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_build_auxiliary_coordinate + result = self.run_testcase( + coord_dim_name="dim_renamed", + dimname="dim_renamed", + in_phenomvar_coords=True, + units="1", + ) + self.check_result(result, "aux") + + +class Test__time(Mixin__singlecoord__tests, tests.IrisTest): + # Run 'time' coord tests + which = "time" + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + +class Test__period(Mixin__singlecoord__tests, tests.IrisTest): + # Run 'time_period' coord tests + which = "period" + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + +class Test__dualcoord(Mixin__timecoords__common, tests.IrisTest): + # Coordinate tests for a combination of 'time' and 'time_period'. + # Not strictly necessary, as handling is independent, but a handy check + # on typical usage. + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + def test_time_and_period(self): + # Test case with both 'time' and 'period', with separate dims. + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_(time) + # 003 : fc_provides_coordinate_(time_period) + # 004 : fc_build_coordinate_(time) + # 005 : fc_build_coordinate_(time_period) + result = self.run_testcase(time_opts={}, period_opts={}) + self.check_result(result, time_is="dim", period_is="dim") + + def test_time_dim_period_aux(self): + # Test case with both 'time' and 'period' sharing a dim. 
+ # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_(time) + # 003 : fc_build_coordinate_(time) + # 004 : fc_build_auxiliary_coordinate_time_period + result = self.run_testcase( + time_opts={}, + period_opts=dict( + dimname="time", + in_phenomvar_dims=False, + in_phenomvar_coords=True, + ), + ) + self.check_result(result, time_is="dim", period_is="aux") + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/fileformats/_pyke_rules/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py similarity index 68% rename from lib/iris/fileformats/_pyke_rules/__init__.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py index ac5753e58b..e6508bea85 100644 --- a/lib/iris/fileformats/_pyke_rules/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py @@ -3,3 +3,8 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. +""" +Unit tests for the module +:mod:`iris.fileformats.netcdf._nc_load_rules.engine` . + +""" diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py new file mode 100644 index 0000000000..df5fbd4922 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py @@ -0,0 +1,103 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :mod:`iris.fileformats._nc_load_rules.engine` module. + +""" +from unittest import mock + +from iris.fileformats._nc_load_rules.engine import Engine, FactEntity +import iris.tests as tests + + +class Test_Engine(tests.IrisTest): + def setUp(self): + self.empty_engine = Engine() + engine = Engine() + engine.add_fact("this", ("that", "other")) + self.nonempty_engine = engine + + def test__init(self): + # Check that init creates an empty Engine. + engine = Engine() + self.assertIsInstance(engine, Engine) + self.assertIsInstance(engine.facts, FactEntity) + self.assertEqual(list(engine.facts.entity_lists.keys()), []) + + def test_reset(self): + # Check that calling reset() causes a non-empty engine to be emptied. + engine = self.nonempty_engine + fact_names = list(engine.facts.entity_lists.keys()) + self.assertNotEqual(len(fact_names), 0) + engine.reset() + fact_names = list(engine.facts.entity_lists.keys()) + self.assertEqual(len(fact_names), 0) + + def test_activate(self): + # Check that calling engine.activate() --> actions.run_actions(engine) + engine = self.empty_engine + target = "iris.fileformats._nc_load_rules.engine.run_actions" + run_call = self.patch(target) + engine.activate() + self.assertEqual(run_call.call_args_list, [mock.call(engine)]) + + def test_add_case_specific_fact__newname(self): + # Adding a new fact to a new fact-name records as expected. + engine = self.nonempty_engine + engine.add_case_specific_fact("new_fact", ("a1", "a2")) + self.assertEqual(engine.fact_list("new_fact"), [("a1", "a2")]) + + def test_add_case_specific_fact__existingname(self): + # Adding a new fact to an existing fact-name records as expected. 
+ engine = self.nonempty_engine + name = "this" + self.assertEqual(engine.fact_list(name), [("that", "other")]) + engine.add_case_specific_fact(name, ("yetanother",)) + self.assertEqual( + engine.fact_list(name), [("that", "other"), ("yetanother",)] + ) + + def test_add_case_specific_fact__emptyargs(self): + # Check that empty args work ok, and will create a new fact. + engine = self.empty_engine + engine.add_case_specific_fact("new_fact", ()) + self.assertIn("new_fact", engine.facts.entity_lists) + self.assertEqual(engine.fact_list("new_fact"), [()]) + + def test_add_fact(self): + # Check that 'add_fact' is equivalent to (short for) a call to + # 'add_case_specific_fact'. + engine = self.empty_engine + target = ( + "iris.fileformats._nc_load_rules.engine.Engine" + ".add_case_specific_fact" + ) + acsf_call = self.patch(target) + engine.add_fact("extra", ()) + self.assertEqual(acsf_call.call_count, 1) + self.assertEqual( + acsf_call.call_args_list, + [mock.call(fact_name="extra", fact_arglist=())], + ) + + def test_get_kb(self): + # Check that this stub just returns the facts database. + engine = self.nonempty_engine + kb = engine.get_kb() + self.assertIsInstance(kb, FactEntity) + self.assertIs(kb, engine.facts) + + def test_fact_list__existing(self): + self.assertEqual( + self.nonempty_engine.fact_list("this"), [("that", "other")] + ) + + def test_fact_list__nonexisting(self): + self.assertEqual(self.empty_engine.fact_list("odd-unknown"), []) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py similarity index 67% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/__init__.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py index ae709e85e1..69a536b9ae 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py @@ -3,4 +3,8 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -"""Unit tests for the :mod:`iris.fileformats.fc_rules_cf_fc` module.""" +""" +Unit tests for the module +:mod:`iris.fileformats.netcdf._nc_load_rules.helpers` . + +""" diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_albers_equal_area_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py similarity index 79% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_albers_equal_area_coordinate_system.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py index 62e6d2e6b2..c040d43ca0 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_albers_equal_area_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_albers_equal_area_coordinate_system`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_albers_equal_area_coordinate_system`. 
""" @@ -17,8 +17,9 @@ import iris from iris.coord_systems import AlbersEqualArea -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_albers_equal_area_coordinate_system +from iris.fileformats._nc_load_rules.helpers import ( + build_albers_equal_area_coordinate_system, +) class TestBuildAlbersEqualAreaCoordinateSystem(tests.IrisTest): @@ -44,19 +45,21 @@ def _test(self, inverse_flattening=False, no_optionals=False): longitude_of_central_meridian=test_lon, false_easting=test_easting, false_northing=test_northing, - standard_parallel=test_parallels) + standard_parallel=test_parallels, + ) # Add ellipsoid args. - gridvar_props['semi_major_axis'] = 6377563.396 + gridvar_props["semi_major_axis"] = 6377563.396 if inverse_flattening: - gridvar_props['inverse_flattening'] = 299.3249646 + gridvar_props["inverse_flattening"] = 299.3249646 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, - inverse_flattening=299.3249646) + 6377563.396, inverse_flattening=299.3249646 + ) else: - gridvar_props['semi_minor_axis'] = 6356256.909 + gridvar_props["semi_minor_axis"] = 6356256.909 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909) + 6377563.396, 6356256.909 + ) cf_grid_var = mock.Mock(spec=[], **gridvar_props) @@ -68,7 +71,8 @@ def _test(self, inverse_flattening=False, no_optionals=False): false_easting=test_easting, false_northing=test_northing, standard_parallels=test_parallels, - ellipsoid=expected_ellipsoid) + ellipsoid=expected_ellipsoid, + ) self.assertEqual(cs, expected) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_auxiliary_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py similarity index 68% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_auxiliary_coordinate.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py index bfe55a79a7..95f892454b 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_auxiliary_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_auxilliary_coordinate`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_auxilliary_coordinate`. """ @@ -18,52 +18,55 @@ import numpy as np from iris.coords import AuxCoord +from iris.fileformats._nc_load_rules.helpers import build_auxiliary_coordinate from iris.fileformats.cf import CFVariable -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_auxiliary_coordinate -# from iris.tests.unit.fileformats.pyke_rules.compiled_krb\ -# .fc_rules_cf_fc.test_build_dimension_coordinate import RulesTestMixin - class TestBoundsVertexDim(tests.IrisTest): # Lookup for various tests (which change the dimension order). dim_names_lens = { - 'foo': 2, 'bar': 3, 'nv': 4, + "foo": 2, + "bar": 3, + "nv": 4, # 'x' and 'y' used as aliases for 'foo' and 'bar' - 'x': 2, 'y': 3} + "x": 2, + "y": 3, + } def setUp(self): # Create coordinate cf variables and pyke engine. 
- dimension_names = ('foo', 'bar') + dimension_names = ("foo", "bar") points, cf_data = self._make_array_and_cf_data(dimension_names) self.cf_coord_var = mock.Mock( spec=CFVariable, dimensions=dimension_names, - cf_name='wibble', + cf_name="wibble", cf_data=cf_data, standard_name=None, - long_name='wibble', - units='m', + long_name="wibble", + units="m", shape=points.shape, dtype=points.dtype, - __getitem__=lambda self, key: points[key]) + __getitem__=lambda self, key: points[key], + ) expected_bounds, _ = self._make_array_and_cf_data( - dimension_names=('foo', 'bar', 'nv')) + dimension_names=("foo", "bar", "nv") + ) self.expected_coord = AuxCoord( self.cf_coord_var[:], long_name=self.cf_coord_var.long_name, var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, - bounds=expected_bounds) + bounds=expected_bounds, + ) self.engine = mock.Mock( cube=mock.Mock(), - cf_var=mock.Mock(dimensions=('foo', 'bar'), - cf_data=cf_data), - filename='DUMMY', - cube_parts=dict(coordinates=[])) + cf_var=mock.Mock(dimensions=("foo", "bar"), cf_data=cf_data), + filename="DUMMY", + cube_parts=dict(coordinates=[]), + ) # Patch the deferred loading that prevents attempted file access. # This assumes that self.cf_bounds_var is defined in the test case. @@ -73,8 +76,10 @@ def patched__getitem__(proxy_self, keys): return var[keys] raise RuntimeError() - self.patch('iris.fileformats.netcdf.NetCDFDataProxy.__getitem__', - new=patched__getitem__) + self.patch( + "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", + new=patched__getitem__, + ) # Patch the helper function that retrieves the bounds cf variable, # and a False flag for climatological. @@ -83,14 +88,14 @@ def _get_per_test_bounds_var(_coord_unused): # Return the 'cf_bounds_var' created by the current test. return (self.cf_bounds_var, False) - self.patch('iris.fileformats._pyke_rules.compiled_krb.' - 'fc_rules_cf_fc.get_cf_bounds_var', - new=_get_per_test_bounds_var) + self.patch( + "iris.fileformats._nc_load_rules.helpers.get_cf_bounds_var", + new=_get_per_test_bounds_var, + ) @classmethod def _make_array_and_cf_data(cls, dimension_names): - shape = tuple(cls.dim_names_lens[name] - for name in dimension_names) + shape = tuple(cls.dim_names_lens[name] for name in dimension_names) cf_data = mock.MagicMock(_FillValue=None, spec=[]) cf_data.chunking = mock.MagicMock(return_value=shape) return np.zeros(shape), cf_data @@ -101,43 +106,45 @@ def _make_cf_bounds_var(self, dimension_names): cf_bounds_var = mock.Mock( spec=CFVariable, dimensions=dimension_names, - cf_name='wibble_bnds', + cf_name="wibble_bnds", cf_data=cf_data, shape=bounds.shape, dtype=bounds.dtype, - __getitem__=lambda self, key: bounds[key]) + __getitem__=lambda self, key: bounds[key], + ) return bounds, cf_bounds_var def _check_case(self, dimension_names): bounds, self.cf_bounds_var = self._make_cf_bounds_var( - dimension_names=dimension_names) + dimension_names=dimension_names + ) # Asserts must lie within context manager because of deferred loading. build_auxiliary_coordinate(self.engine, self.cf_coord_var) # Test that expected coord is built and added to cube. self.engine.cube.add_aux_coord.assert_called_with( - self.expected_coord, [0, 1]) + self.expected_coord, [0, 1] + ) # Test that engine.cube_parts container is correctly populated. 
expected_list = [(self.expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.cube_parts['coordinates'], - expected_list) + self.assertEqual(self.engine.cube_parts["coordinates"], expected_list) def test_fastest_varying_vertex_dim(self): # The usual order. - self._check_case(dimension_names=('foo', 'bar', 'nv')) + self._check_case(dimension_names=("foo", "bar", "nv")) def test_slowest_varying_vertex_dim(self): # Bounds in the first (slowest varying) dimension. - self._check_case(dimension_names=('nv', 'foo', 'bar')) + self._check_case(dimension_names=("nv", "foo", "bar")) def test_fastest_with_different_dim_names(self): # Despite the dimension names ('x', and 'y') differing from the coord's # which are 'foo' and 'bar' (as permitted by the cf spec), # this should still work because the vertex dim is the fastest varying. - self._check_case(dimension_names=('x', 'y', 'nv')) + self._check_case(dimension_names=("x", "y", "nv")) class TestDtype(tests.IrisTest): @@ -149,21 +156,23 @@ def setUp(self): self.cf_coord_var = mock.Mock( spec=CFVariable, - dimensions=('foo', 'bar'), - cf_name='wibble', + dimensions=("foo", "bar"), + cf_name="wibble", cf_data=cf_data, standard_name=None, - long_name='wibble', - units='m', + long_name="wibble", + units="m", shape=points.shape, dtype=points.dtype, - __getitem__=lambda self, key: points[key]) + __getitem__=lambda self, key: points[key], + ) self.engine = mock.Mock( cube=mock.Mock(), - cf_var=mock.Mock(dimensions=('foo', 'bar')), - filename='DUMMY', - cube_parts=dict(coordinates=[])) + cf_var=mock.Mock(dimensions=("foo", "bar")), + filename="DUMMY", + cube_parts=dict(coordinates=[]), + ) def patched__getitem__(proxy_self, keys): if proxy_self.variable_name == self.cf_coord_var.cf_name: @@ -171,8 +180,9 @@ def patched__getitem__(proxy_self, keys): raise RuntimeError() self.deferred_load_patch = mock.patch( - 'iris.fileformats.netcdf.NetCDFDataProxy.__getitem__', - new=patched__getitem__) + "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", + new=patched__getitem__, + ) def test_scale_factor_add_offset_int(self): self.cf_coord_var.scale_factor = 3 @@ -181,26 +191,26 @@ def test_scale_factor_add_offset_int(self): with self.deferred_load_patch: build_auxiliary_coordinate(self.engine, self.cf_coord_var) - coord, _ = self.engine.cube_parts['coordinates'][0] - self.assertEqual(coord.dtype.kind, 'i') + coord, _ = self.engine.cube_parts["coordinates"][0] + self.assertEqual(coord.dtype.kind, "i") def test_scale_factor_float(self): - self.cf_coord_var.scale_factor = 3. + self.cf_coord_var.scale_factor = 3.0 with self.deferred_load_patch: build_auxiliary_coordinate(self.engine, self.cf_coord_var) - coord, _ = self.engine.cube_parts['coordinates'][0] - self.assertEqual(coord.dtype.kind, 'f') + coord, _ = self.engine.cube_parts["coordinates"][0] + self.assertEqual(coord.dtype.kind, "f") def test_add_offset_float(self): - self.cf_coord_var.add_offset = 5. + self.cf_coord_var.add_offset = 5.0 with self.deferred_load_patch: build_auxiliary_coordinate(self.engine, self.cf_coord_var) - coord, _ = self.engine.cube_parts['coordinates'][0] - self.assertEqual(coord.dtype.kind, 'f') + coord, _ = self.engine.cube_parts["coordinates"][0] + self.assertEqual(coord.dtype.kind, "f") class TestCoordConstruction(tests.IrisTest): @@ -208,35 +218,40 @@ def setUp(self): # Create dummy pyke engine. 
self.engine = mock.Mock( cube=mock.Mock(), - cf_var=mock.Mock(dimensions=('foo', 'bar')), - filename='DUMMY', - cube_parts=dict(coordinates=[])) + cf_var=mock.Mock(dimensions=("foo", "bar")), + filename="DUMMY", + cube_parts=dict(coordinates=[]), + ) points = np.arange(6) self.cf_coord_var = mock.Mock( - dimensions=('foo',), + dimensions=("foo",), scale_factor=1, add_offset=0, - cf_name='wibble', - cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None), spec=[]), + cf_name="wibble", + cf_data=mock.MagicMock( + chunking=mock.Mock(return_value=None), spec=[] + ), standard_name=None, - long_name='wibble', - units='days since 1970-01-01', + long_name="wibble", + units="days since 1970-01-01", calendar=None, shape=points.shape, dtype=points.dtype, - __getitem__=lambda self, key: points[key]) + __getitem__=lambda self, key: points[key], + ) bounds = np.arange(12).reshape(6, 2) self.cf_bounds_var = mock.Mock( - dimensions=('x', 'nv'), + dimensions=("x", "nv"), scale_factor=1, add_offset=0, - cf_name='wibble_bnds', + cf_name="wibble_bnds", cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None)), shape=bounds.shape, dtype=bounds.dtype, - __getitem__=lambda self, key: bounds[key]) + __getitem__=lambda self, key: bounds[key], + ) self.bounds = bounds # Create patch for deferred loading that prevents attempted @@ -248,8 +263,10 @@ def patched__getitem__(proxy_self, keys): return var[keys] raise RuntimeError() - self.patch('iris.fileformats.netcdf.NetCDFDataProxy.__getitem__', - new=patched__getitem__) + self.patch( + "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", + new=patched__getitem__, + ) # Patch the helper function that retrieves the bounds cf variable. # This avoids the need for setting up further mocking of cf objects. @@ -258,9 +275,10 @@ def patched__getitem__(proxy_self, keys): def get_cf_bounds_var(coord_var): return self.cf_bounds_var, self.use_climatology_bounds - self.patch('iris.fileformats._pyke_rules.compiled_krb.' - 'fc_rules_cf_fc.get_cf_bounds_var', - new=get_cf_bounds_var) + self.patch( + "iris.fileformats._nc_load_rules.helpers.get_cf_bounds_var", + new=get_cf_bounds_var, + ) def check_case_aux_coord_construction(self, climatology=False): # Test a generic auxiliary coordinate, with or without @@ -273,13 +291,13 @@ def check_case_aux_coord_construction(self, climatology=False): var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, bounds=self.bounds, - climatological=climatology) + climatological=climatology, + ) build_auxiliary_coordinate(self.engine, self.cf_coord_var) # Test that expected coord is built and added to cube. 
- self.engine.cube.add_aux_coord.assert_called_with( - expected_coord, [0]) + self.engine.cube.add_aux_coord.assert_called_with(expected_coord, [0]) def test_aux_coord_construction(self): self.check_case_aux_coord_construction(climatology=False) @@ -288,5 +306,5 @@ def test_aux_coord_construction__climatology(self): self.check_case_aux_coord_construction(climatology=True) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_cube_metadata.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py similarity index 67% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_cube_metadata.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py index de3354901b..a13fa6cca0 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_cube_metadata.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_cube_metadata`. +Test function :func:`iris.fileformats._nc_load_rules.helpers\ +build_cube_metadata`. """ @@ -18,8 +18,7 @@ import numpy as np from iris.cube import Cube -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_cube_metadata +from iris.fileformats._nc_load_rules.helpers import build_cube_metadata def _make_engine(global_attributes=None, standard_name=None, long_name=None): @@ -29,44 +28,48 @@ def _make_engine(global_attributes=None, standard_name=None, long_name=None): cf_group = mock.Mock(global_attributes=global_attributes) cf_var = mock.MagicMock( - cf_name='wibble', + cf_name="wibble", standard_name=standard_name, long_name=long_name, - units='m', + units="m", dtype=np.float64, cell_methods=None, - cf_group=cf_group) + cf_group=cf_group, + ) - engine = mock.Mock( - cube=Cube([23]), - cf_var=cf_var) + engine = mock.Mock(cube=Cube([23]), cf_var=cf_var) return engine class TestInvalidGlobalAttributes(tests.IrisTest): def test_valid(self): - global_attributes = {'Conventions': 'CF-1.5', - 'comment': 'Mocked test object'} + global_attributes = { + "Conventions": "CF-1.5", + "comment": "Mocked test object", + } engine = _make_engine(global_attributes) build_cube_metadata(engine) expected = global_attributes self.assertEqual(engine.cube.attributes, expected) def test_invalid(self): - global_attributes = {'Conventions': 'CF-1.5', - 'comment': 'Mocked test object', - 'calendar': 'standard'} + global_attributes = { + "Conventions": "CF-1.5", + "comment": "Mocked test object", + "calendar": "standard", + } engine = _make_engine(global_attributes) - with mock.patch('warnings.warn') as warn: + with mock.patch("warnings.warn") as warn: build_cube_metadata(engine) # Check for a warning. self.assertEqual(warn.call_count, 1) - self.assertIn("Skipping global attribute 'calendar'", - warn.call_args[0][0]) + self.assertIn( + "Skipping global attribute 'calendar'", warn.call_args[0][0] + ) # Check resulting attributes. The invalid entry 'calendar' # should be filtered out. 
- global_attributes.pop('calendar') + global_attributes.pop("calendar") expected = global_attributes self.assertEqual(engine.cube.attributes, expected) @@ -91,28 +94,31 @@ def test_standard_name_none_long_name_none(self): self.check_cube_names(inputs, expected) def test_standard_name_none_long_name_set(self): - inputs = (None, 'ice_thickness_long_name') - expected = (None, 'ice_thickness_long_name') + inputs = (None, "ice_thickness_long_name") + expected = (None, "ice_thickness_long_name") self.check_cube_names(inputs, expected) def test_standard_name_valid_long_name_none(self): - inputs = ('sea_ice_thickness', None) - expected = ('sea_ice_thickness', None) + inputs = ("sea_ice_thickness", None) + expected = ("sea_ice_thickness", None) self.check_cube_names(inputs, expected) def test_standard_name_valid_long_name_set(self): - inputs = ('sea_ice_thickness', 'ice_thickness_long_name') - expected = ('sea_ice_thickness', 'ice_thickness_long_name') + inputs = ("sea_ice_thickness", "ice_thickness_long_name") + expected = ("sea_ice_thickness", "ice_thickness_long_name") self.check_cube_names(inputs, expected) def test_standard_name_invalid_long_name_none(self): - inputs = ('not_a_standard_name', None) - expected = (None, 'not_a_standard_name',) + inputs = ("not_a_standard_name", None) + expected = ( + None, + "not_a_standard_name", + ) self.check_cube_names(inputs, expected) def test_standard_name_invalid_long_name_set(self): - inputs = ('not_a_standard_name', 'ice_thickness_long_name') - expected = (None, 'ice_thickness_long_name') + inputs = ("not_a_standard_name", "ice_thickness_long_name") + expected = (None, "ice_thickness_long_name") self.check_cube_names(inputs, expected) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_dimension_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py similarity index 72% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_dimension_coordinate.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py index 50d81b2c1f..a75678d923 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_dimension_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_dimension_coordinate`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_dimension_coordinate`. """ @@ -19,8 +19,7 @@ import numpy as np from iris.coords import AuxCoord, DimCoord -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_dimension_coordinate +from iris.fileformats._nc_load_rules.helpers import build_dimension_coordinate class RulesTestMixin: @@ -28,9 +27,10 @@ def setUp(self): # Create dummy pyke engine. self.engine = mock.Mock( cube=mock.Mock(), - cf_var=mock.Mock(dimensions=('foo', 'bar')), - filename='DUMMY', - cube_parts=dict(coordinates=[])) + cf_var=mock.Mock(dimensions=("foo", "bar")), + filename="DUMMY", + cube_parts=dict(coordinates=[]), + ) # Create patch for deferred loading that prevents attempted # file access. 
This assumes that self.cf_coord_var and @@ -42,8 +42,9 @@ def patched__getitem__(proxy_self, keys): raise RuntimeError() self.deferred_load_patch = mock.patch( - 'iris.fileformats.netcdf.NetCDFDataProxy.__getitem__', - new=patched__getitem__) + "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", + new=patched__getitem__, + ) # Patch the helper function that retrieves the bounds cf variable. # This avoids the need for setting up further mocking of cf objects. @@ -53,9 +54,9 @@ def get_cf_bounds_var(coord_var): return self.cf_bounds_var, self.use_climatology_bounds self.get_cf_bounds_var_patch = mock.patch( - 'iris.fileformats._pyke_rules.compiled_krb.' - 'fc_rules_cf_fc.get_cf_bounds_var', - new=get_cf_bounds_var) + "iris.fileformats._nc_load_rules.helpers.get_cf_bounds_var", + new=get_cf_bounds_var, + ) class TestCoordConstruction(tests.IrisTest, RulesTestMixin): @@ -65,24 +66,26 @@ def setUp(self): bounds = np.arange(12).reshape(6, 2) self.cf_bounds_var = mock.Mock( - dimensions=('x', 'nv'), - cf_name='wibble_bnds', + dimensions=("x", "nv"), + cf_name="wibble_bnds", shape=bounds.shape, - __getitem__=lambda self, key: bounds[key]) + __getitem__=lambda self, key: bounds[key], + ) self.bounds = bounds def _set_cf_coord_var(self, points): self.cf_coord_var = mock.Mock( - dimensions=('foo',), - cf_name='wibble', + dimensions=("foo",), + cf_name="wibble", cf_data=mock.Mock(spec=[]), standard_name=None, - long_name='wibble', - units='days since 1970-01-01', + long_name="wibble", + units="days since 1970-01-01", calendar=None, shape=points.shape, dtype=points.dtype, - __getitem__=lambda self, key: points[key]) + __getitem__=lambda self, key: points[key], + ) def check_case_dim_coord_construction(self, climatology=False): # Test a generic dimension coordinate, with or without @@ -96,7 +99,8 @@ def check_case_dim_coord_construction(self, climatology=False): var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, bounds=self.bounds, - climatological=climatology) + climatological=climatology, + ) # Asserts must lie within context manager because of deferred loading. with self.deferred_load_patch, self.get_cf_bounds_var_patch: @@ -104,7 +108,8 @@ def check_case_dim_coord_construction(self, climatology=False): # Test that expected coord is built and added to cube. self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0]) + expected_coord, [0] + ) def test_dim_coord_construction(self): self.check_case_dim_coord_construction(climatology=False) @@ -113,18 +118,21 @@ def test_dim_coord_construction__climatology(self): self.check_case_dim_coord_construction(climatology=True) def test_dim_coord_construction_masked_array(self): - self._set_cf_coord_var(np.ma.array( - np.arange(6), - mask=[True, False, False, False, False, False], - fill_value=-999, - )) + self._set_cf_coord_var( + np.ma.array( + np.arange(6), + mask=[True, False, False, False, False, False], + fill_value=-999, + ) + ) expected_coord = DimCoord( np.array([-999, 1, 2, 3, 4, 5]), long_name=self.cf_coord_var.long_name, var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, - bounds=self.bounds) + bounds=self.bounds, + ) with warnings.catch_warnings(record=True) as w: # Asserts must lie within context manager because of deferred @@ -134,24 +142,28 @@ def test_dim_coord_construction_masked_array(self): # Test that expected coord is built and added to cube. 
self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0]) + expected_coord, [0] + ) # Assert warning is raised assert len(w) == 1 - assert 'Gracefully filling' in w[0].message.args[0] + assert "Gracefully filling" in w[0].message.args[0] def test_dim_coord_construction_masked_array_mask_does_nothing(self): - self._set_cf_coord_var(np.ma.array( - np.arange(6), - mask=False, - )) + self._set_cf_coord_var( + np.ma.array( + np.arange(6), + mask=False, + ) + ) expected_coord = DimCoord( self.cf_coord_var[:], long_name=self.cf_coord_var.long_name, var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, - bounds=self.bounds) + bounds=self.bounds, + ) with warnings.catch_warnings(record=True) as w: # Asserts must lie within context manager because of deferred @@ -161,7 +173,8 @@ def test_dim_coord_construction_masked_array_mask_does_nothing(self): # Test that expected coord is built and added to cube. self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0]) + expected_coord, [0] + ) # Assert no warning is raised assert len(w) == 0 @@ -175,7 +188,8 @@ def test_dim_coord_construction_masked_bounds_mask_does_nothing(self): long_name=self.cf_coord_var.long_name, var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, - bounds=self.bounds) + bounds=self.bounds, + ) with warnings.catch_warnings(record=True) as w: # Asserts must lie within context manager because of deferred @@ -185,7 +199,8 @@ def test_dim_coord_construction_masked_bounds_mask_does_nothing(self): # Test that expected coord is built and added to cube. self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0]) + expected_coord, [0] + ) # Assert no warning is raised assert len(w) == 0 @@ -200,20 +215,23 @@ def test_aux_coord_construction(self): long_name=self.cf_coord_var.long_name, var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, - bounds=self.bounds) + bounds=self.bounds, + ) - warning_patch = mock.patch('warnings.warn') + warning_patch = mock.patch("warnings.warn") # Asserts must lie within context manager because of deferred loading. - with warning_patch, self.deferred_load_patch, \ - self.get_cf_bounds_var_patch: + with warning_patch, self.deferred_load_patch, self.get_cf_bounds_var_patch: build_dimension_coordinate(self.engine, self.cf_coord_var) # Test that expected coord is built and added to cube. self.engine.cube.add_aux_coord.assert_called_with( - expected_coord, [0]) - self.assertIn("creating 'wibble' auxiliary coordinate instead", - warnings.warn.call_args[0][0]) + expected_coord, [0] + ) + self.assertIn( + "creating 'wibble' auxiliary coordinate instead", + warnings.warn.call_args[0][0], + ) class TestBoundsVertexDim(tests.IrisTest, RulesTestMixin): @@ -223,24 +241,26 @@ def setUp(self): # Create test coordinate cf variable. points = np.arange(6) self.cf_coord_var = mock.Mock( - dimensions=('foo',), - cf_name='wibble', + dimensions=("foo",), + cf_name="wibble", standard_name=None, - long_name='wibble', + long_name="wibble", cf_data=mock.Mock(spec=[]), - units='m', + units="m", shape=points.shape, dtype=points.dtype, - __getitem__=lambda self, key: points[key]) + __getitem__=lambda self, key: points[key], + ) def test_slowest_varying_vertex_dim(self): # Create the bounds cf variable. 
bounds = np.arange(12).reshape(2, 6) self.cf_bounds_var = mock.Mock( - dimensions=('nv', 'foo'), - cf_name='wibble_bnds', + dimensions=("nv", "foo"), + cf_name="wibble_bnds", shape=bounds.shape, - __getitem__=lambda self, key: bounds[key]) + __getitem__=lambda self, key: bounds[key], + ) # Expected bounds on the resulting coordinate should be rolled so that # the vertex dimension is at the end. @@ -250,7 +270,8 @@ def test_slowest_varying_vertex_dim(self): long_name=self.cf_coord_var.long_name, var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, - bounds=expected_bounds) + bounds=expected_bounds, + ) # Asserts must lie within context manager because of deferred loading. with self.deferred_load_patch, self.get_cf_bounds_var_patch: @@ -258,27 +279,31 @@ def test_slowest_varying_vertex_dim(self): # Test that expected coord is built and added to cube. self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0]) + expected_coord, [0] + ) # Test that engine.cube_parts container is correctly populated. expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.cube_parts['coordinates'], - expected_list) + self.assertEqual( + self.engine.cube_parts["coordinates"], expected_list + ) def test_fastest_varying_vertex_dim(self): bounds = np.arange(12).reshape(6, 2) self.cf_bounds_var = mock.Mock( - dimensions=('foo', 'nv'), - cf_name='wibble_bnds', + dimensions=("foo", "nv"), + cf_name="wibble_bnds", shape=bounds.shape, - __getitem__=lambda self, key: bounds[key]) + __getitem__=lambda self, key: bounds[key], + ) expected_coord = DimCoord( self.cf_coord_var[:], long_name=self.cf_coord_var.long_name, var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, - bounds=bounds) + bounds=bounds, + ) # Asserts must lie within context manager because of deferred loading. with self.deferred_load_patch, self.get_cf_bounds_var_patch: @@ -286,12 +311,14 @@ def test_fastest_varying_vertex_dim(self): # Test that expected coord is built and added to cube. self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0]) + expected_coord, [0] + ) # Test that engine.cube_parts container is correctly populated. expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.cube_parts['coordinates'], - expected_list) + self.assertEqual( + self.engine.cube_parts["coordinates"], expected_list + ) def test_fastest_with_different_dim_names(self): # Despite the dimension names 'x' differing from the coord's @@ -299,17 +326,19 @@ def test_fastest_with_different_dim_names(self): # this should still work because the vertex dim is the fastest varying. bounds = np.arange(12).reshape(6, 2) self.cf_bounds_var = mock.Mock( - dimensions=('x', 'nv'), - cf_name='wibble_bnds', + dimensions=("x", "nv"), + cf_name="wibble_bnds", shape=bounds.shape, - __getitem__=lambda self, key: bounds[key]) + __getitem__=lambda self, key: bounds[key], + ) expected_coord = DimCoord( self.cf_coord_var[:], long_name=self.cf_coord_var.long_name, var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, - bounds=bounds) + bounds=bounds, + ) # Asserts must lie within context manager because of deferred loading. with self.deferred_load_patch, self.get_cf_bounds_var_patch: @@ -317,12 +346,14 @@ def test_fastest_with_different_dim_names(self): # Test that expected coord is built and added to cube. 
self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0]) + expected_coord, [0] + ) # Test that engine.cube_parts container is correctly populated. expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.cube_parts['coordinates'], - expected_list) + self.assertEqual( + self.engine.cube_parts["coordinates"], expected_list + ) class TestCircular(tests.IrisTest, RulesTestMixin): @@ -332,36 +363,38 @@ def setUp(self): RulesTestMixin.setUp(self) self.cf_bounds_var = None - def _make_vars(self, points, bounds=None, units='degrees'): + def _make_vars(self, points, bounds=None, units="degrees"): points = np.array(points) self.cf_coord_var = mock.MagicMock( - dimensions=('foo',), - cf_name='wibble', + dimensions=("foo",), + cf_name="wibble", standard_name=None, - long_name='wibble', + long_name="wibble", cf_data=mock.Mock(spec=[]), units=units, shape=points.shape, dtype=points.dtype, - __getitem__=lambda self, key: points[key]) + __getitem__=lambda self, key: points[key], + ) if bounds: - bounds = np.array(bounds).reshape( - self.cf_coord_var.shape + (2,)) + bounds = np.array(bounds).reshape(self.cf_coord_var.shape + (2,)) self.cf_bounds_var = mock.Mock( - dimensions=('x', 'nv'), - cf_name='wibble_bnds', + dimensions=("x", "nv"), + cf_name="wibble_bnds", shape=bounds.shape, - __getitem__=lambda self, key: bounds[key]) + __getitem__=lambda self, key: bounds[key], + ) def _check_circular(self, circular, *args, **kwargs): - if 'coord_name' in kwargs: - coord_name = kwargs.pop('coord_name') + if "coord_name" in kwargs: + coord_name = kwargs.pop("coord_name") else: - coord_name = 'longitude' + coord_name = "longitude" self._make_vars(*args, **kwargs) with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_dimension_coordinate(self.engine, self.cf_coord_var, - coord_name=coord_name) + build_dimension_coordinate( + self.engine, self.cf_coord_var, coord_name=coord_name + ) self.assertEqual(self.engine.cube.add_dim_coord.call_count, 1) coord, dims = self.engine.cube.add_dim_coord.call_args[0] self.assertEqual(coord.circular, circular) @@ -395,11 +428,11 @@ def test_multiple_unbounded_circular(self): def test_non_angle_noncircular(self): points = [0.0, 90.0, 180.0, 270.0] - self.check_noncircular(points, units='m') + self.check_noncircular(points, units="m") def test_non_longitude_noncircular(self): points = [0.0, 90.0, 180.0, 270.0] - self.check_noncircular(points, coord_name='depth') + self.check_noncircular(points, coord_name="depth") def test_multiple_unbounded_irregular_noncircular(self): self.check_noncircular([0.0, 90.0, 189.999, 270.0]) @@ -411,16 +444,16 @@ def test_multiple_unbounded_shortrange_circular(self): self.check_circular([0.0, 90.0, 180.0, 269.9999]) def test_multiple_bounded_circular(self): - self.check_circular([0.0, 120.3, 240.0], - bounds=[[-45.0, 50.0], - [100.0, 175.0], - [200.0, 315.0]]) + self.check_circular( + [0.0, 120.3, 240.0], + bounds=[[-45.0, 50.0], [100.0, 175.0], [200.0, 315.0]], + ) def test_multiple_bounded_noncircular(self): - self.check_noncircular([0.0, 120.3, 240.0], - bounds=[[-45.0, 50.0], - [100.0, 175.0], - [200.0, 355.0]]) + self.check_noncircular( + [0.0, 120.3, 240.0], + bounds=[[-45.0, 50.0], [100.0, 175.0], [200.0, 355.0]], + ) class TestCircularScalar(tests.IrisTest, RulesTestMixin): @@ -431,57 +464,60 @@ def _make_vars(self, bounds): # Create cf vars for the coordinate and its bounds. # Note that for a scalar the shape of the array from # the cf var is (), rather than (1,). 
- points = np.array([0.]) + points = np.array([0.0]) self.cf_coord_var = mock.Mock( dimensions=(), - cf_name='wibble', + cf_name="wibble", standard_name=None, - long_name='wibble', - units='degrees', + long_name="wibble", + units="degrees", cf_data=mock.Mock(spec=[]), shape=(), dtype=points.dtype, - __getitem__=lambda self, key: points[key]) + __getitem__=lambda self, key: points[key], + ) bounds = np.array(bounds) self.cf_bounds_var = mock.Mock( - dimensions=(u'bnds'), - cf_name='wibble_bnds', + dimensions=("bnds"), + cf_name="wibble_bnds", shape=bounds.shape, - __getitem__=lambda self, key: bounds[key]) + __getitem__=lambda self, key: bounds[key], + ) def _assert_circular(self, value): with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_dimension_coordinate(self.engine, self.cf_coord_var, - coord_name='longitude') + build_dimension_coordinate( + self.engine, self.cf_coord_var, coord_name="longitude" + ) self.assertEqual(self.engine.cube.add_aux_coord.call_count, 1) coord, dims = self.engine.cube.add_aux_coord.call_args[0] self.assertEqual(coord.circular, value) def test_two_bounds_noncircular(self): - self._make_vars([0., 180.]) + self._make_vars([0.0, 180.0]) self._assert_circular(False) def test_two_bounds_circular(self): - self._make_vars([0., 360.]) + self._make_vars([0.0, 360.0]) self._assert_circular(True) def test_two_bounds_circular_decreasing(self): - self._make_vars([360., 0.]) + self._make_vars([360.0, 0.0]) self._assert_circular(True) def test_two_bounds_circular_alt(self): - self._make_vars([-180., 180.]) + self._make_vars([-180.0, 180.0]) self._assert_circular(True) def test_two_bounds_circular_alt_decreasing(self): - self._make_vars([180., -180.]) + self._make_vars([180.0, -180.0]) self._assert_circular(True) def test_four_bounds(self): - self._make_vars([0., 10., 20., 30.]) + self._make_vars([0.0, 10.0, 20.0, 30.0]) self._assert_circular(False) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_geostationary_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py similarity index 68% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_geostationary_coordinate_system.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py index b9a95bc094..28b3d8ab9a 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_geostationary_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_geostationary_coordinate_system`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_geostationary_coordinate_system`. 
""" @@ -17,12 +17,15 @@ import iris from iris.coord_systems import Geostationary -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_geostationary_coordinate_system +from iris.fileformats._nc_load_rules.helpers import ( + build_geostationary_coordinate_system, +) class TestBuildGeostationaryCoordinateSystem(tests.IrisTest): - def _test(self, inverse_flattening=False, replace_props=None, remove_props=None): + def _test( + self, inverse_flattening=False, replace_props=None, remove_props=None + ): """ Generic test that can check vertical perspective validity with or without inverse flattening. @@ -30,12 +33,13 @@ def _test(self, inverse_flattening=False, replace_props=None, remove_props=None) # Make a dictionary of the non-ellipsoid properties to be added to both a test # coord-system, and a test grid-mapping cf_var. non_ellipsoid_kwargs = { - 'latitude_of_projection_origin': 0.0, - 'longitude_of_projection_origin': 2.0, - 'perspective_point_height': 2000000.0, - 'sweep_angle_axis': 'x', - 'false_easting': 100.0, - 'false_northing': 200.0} + "latitude_of_projection_origin": 0.0, + "longitude_of_projection_origin": 2.0, + "perspective_point_height": 2000000.0, + "sweep_angle_axis": "x", + "false_easting": 100.0, + "false_northing": 200.0, + } # Make specified adjustments to the non-ellipsoid properties. if remove_props: @@ -47,11 +51,11 @@ def _test(self, inverse_flattening=False, replace_props=None, remove_props=None) # Make a dictionary of ellipsoid properties, to be added to both a test # ellipsoid and the grid-mapping cf_var. - ellipsoid_kwargs = {'semi_major_axis': 6377563.396} + ellipsoid_kwargs = {"semi_major_axis": 6377563.396} if inverse_flattening: - ellipsoid_kwargs['inverse_flattening'] = 299.3249646 + ellipsoid_kwargs["inverse_flattening"] = 299.3249646 else: - ellipsoid_kwargs['semi_minor_axis'] = 6356256.909 + ellipsoid_kwargs["semi_minor_axis"] = 6356256.909 cf_grid_var_kwargs = non_ellipsoid_kwargs.copy() cf_grid_var_kwargs.update(ellipsoid_kwargs) @@ -68,10 +72,12 @@ def test_inverse_flattening(self): self._test(inverse_flattening=True) def test_false_offsets_missing(self): - self._test(remove_props=['false_easting', 'false_northing']) + self._test(remove_props=["false_easting", "false_northing"]) def test_false_offsets_none(self): - self._test(replace_props={'false_easting':None, 'false_northing':None}) + self._test( + replace_props={"false_easting": None, "false_northing": None} + ) if __name__ == "__main__": diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_lambert_azimuthal_equal_area_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py similarity index 76% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_lambert_azimuthal_equal_area_coordinate_system.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py index f5346cbc68..05185a4cf5 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_lambert_azimuthal_equal_area_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. 
""" -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_lambert_azimuthal_equal_area_coordinate_system`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_lambert_azimuthal_equal_area_coordinate_system`. """ @@ -17,8 +17,9 @@ import iris from iris.coord_systems import LambertAzimuthalEqualArea -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_lambert_azimuthal_equal_area_coordinate_system +from iris.fileformats._nc_load_rules.helpers import ( + build_lambert_azimuthal_equal_area_coordinate_system, +) class TestBuildLambertAzimuthalEqualAreaCoordinateSystem(tests.IrisTest): @@ -41,31 +42,35 @@ def _test(self, inverse_flattening=False, no_optionals=False): latitude_of_projection_origin=test_lat, longitude_of_projection_origin=test_lon, false_easting=test_easting, - false_northing=test_northing) + false_northing=test_northing, + ) # Add ellipsoid args. - gridvar_props['semi_major_axis'] = 6377563.396 + gridvar_props["semi_major_axis"] = 6377563.396 if inverse_flattening: - gridvar_props['inverse_flattening'] = 299.3249646 + gridvar_props["inverse_flattening"] = 299.3249646 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, - inverse_flattening=299.3249646) + 6377563.396, inverse_flattening=299.3249646 + ) else: - gridvar_props['semi_minor_axis'] = 6356256.909 + gridvar_props["semi_minor_axis"] = 6356256.909 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909) + 6377563.396, 6356256.909 + ) cf_grid_var = mock.Mock(spec=[], **gridvar_props) cs = build_lambert_azimuthal_equal_area_coordinate_system( - None, cf_grid_var) + None, cf_grid_var + ) expected = LambertAzimuthalEqualArea( latitude_of_projection_origin=test_lat, longitude_of_projection_origin=test_lon, false_easting=test_easting, false_northing=test_northing, - ellipsoid=expected_ellipsoid) + ellipsoid=expected_ellipsoid, + ) self.assertEqual(cs, expected) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_lambert_conformal_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py similarity index 78% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_lambert_conformal_coordinate_system.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py index 458b60d36f..22bb7149b1 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_lambert_conformal_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_lambert_conformal_coordinate_system`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_lambert_conformal_coordinate_system`. 
""" @@ -17,8 +17,9 @@ import iris from iris.coord_systems import LambertConformal -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_lambert_conformal_coordinate_system +from iris.fileformats._nc_load_rules.helpers import ( + build_lambert_conformal_coordinate_system, +) class TestBuildLambertConformalCoordinateSystem(tests.IrisTest): @@ -44,19 +45,21 @@ def _test(self, inverse_flattening=False, no_optionals=False): longitude_of_central_meridian=test_lon, false_easting=test_easting, false_northing=test_northing, - standard_parallel=test_parallels) + standard_parallel=test_parallels, + ) # Add ellipsoid args. - gridvar_props['semi_major_axis'] = 6377563.396 + gridvar_props["semi_major_axis"] = 6377563.396 if inverse_flattening: - gridvar_props['inverse_flattening'] = 299.3249646 + gridvar_props["inverse_flattening"] = 299.3249646 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, - inverse_flattening=299.3249646) + 6377563.396, inverse_flattening=299.3249646 + ) else: - gridvar_props['semi_minor_axis'] = 6356256.909 + gridvar_props["semi_minor_axis"] = 6356256.909 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909) + 6377563.396, 6356256.909 + ) cf_grid_var = mock.Mock(spec=[], **gridvar_props) @@ -68,7 +71,8 @@ def _test(self, inverse_flattening=False, no_optionals=False): false_easting=test_easting, false_northing=test_northing, secant_latitudes=test_parallels, - ellipsoid=expected_ellipsoid) + ellipsoid=expected_ellipsoid, + ) self.assertEqual(cs, expected) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py similarity index 74% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_mercator_coordinate_system.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py index f0a20c189f..2be5477cb7 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_mercator_coordinate_system`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_mercator_coordinate_system`. 
""" @@ -17,8 +17,9 @@ import iris from iris.coord_systems import Mercator -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_mercator_coordinate_system +from iris.fileformats._nc_load_rules.helpers import ( + build_mercator_coordinate_system, +) class TestBuildMercatorCoordinateSystem(tests.IrisTest): @@ -27,16 +28,19 @@ def test_valid(self): spec=[], longitude_of_projection_origin=-90, semi_major_axis=6377563.396, - semi_minor_axis=6356256.909) + semi_minor_axis=6356256.909, + ) cs = build_mercator_coordinate_system(None, cf_grid_var) expected = Mercator( longitude_of_projection_origin=( - cf_grid_var.longitude_of_projection_origin), + cf_grid_var.longitude_of_projection_origin + ), ellipsoid=iris.coord_systems.GeogCS( - cf_grid_var.semi_major_axis, - cf_grid_var.semi_minor_axis)) + cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis + ), + ) self.assertEqual(cs, expected) def test_inverse_flattening(self): @@ -44,30 +48,37 @@ def test_inverse_flattening(self): spec=[], longitude_of_projection_origin=-90, semi_major_axis=6377563.396, - inverse_flattening=299.3249646) + inverse_flattening=299.3249646, + ) cs = build_mercator_coordinate_system(None, cf_grid_var) expected = Mercator( longitude_of_projection_origin=( - cf_grid_var.longitude_of_projection_origin), + cf_grid_var.longitude_of_projection_origin + ), ellipsoid=iris.coord_systems.GeogCS( cf_grid_var.semi_major_axis, - inverse_flattening=cf_grid_var.inverse_flattening)) + inverse_flattening=cf_grid_var.inverse_flattening, + ), + ) self.assertEqual(cs, expected) def test_longitude_missing(self): cf_grid_var = mock.Mock( spec=[], semi_major_axis=6377563.396, - inverse_flattening=299.3249646) + inverse_flattening=299.3249646, + ) cs = build_mercator_coordinate_system(None, cf_grid_var) expected = Mercator( ellipsoid=iris.coord_systems.GeogCS( cf_grid_var.semi_major_axis, - inverse_flattening=cf_grid_var.inverse_flattening)) + inverse_flattening=cf_grid_var.inverse_flattening, + ) + ) self.assertEqual(cs, expected) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_stereographic_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py similarity index 74% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_stereographic_coordinate_system.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py index 358958ce84..5058e4d7d3 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_stereographic_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_sterographic_coordinate_system`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_sterographic_coordinate_system`. 
""" @@ -17,8 +17,9 @@ import iris from iris.coord_systems import Stereographic -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_stereographic_coordinate_system +from iris.fileformats._nc_load_rules.helpers import ( + build_stereographic_coordinate_system, +) class TestBuildStereographicCoordinateSystem(tests.IrisTest): @@ -31,21 +32,23 @@ def _test(self, inverse_flattening=False, no_offsets=False): false_easting=test_easting, false_northing=test_northing, scale_factor_at_projection_origin=1, - semi_major_axis=6377563.396) + semi_major_axis=6377563.396, + ) if inverse_flattening: - gridvar_props['inverse_flattening'] = 299.3249646 + gridvar_props["inverse_flattening"] = 299.3249646 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, - inverse_flattening=299.3249646) + 6377563.396, inverse_flattening=299.3249646 + ) else: - gridvar_props['semi_minor_axis'] = 6356256.909 + gridvar_props["semi_minor_axis"] = 6356256.909 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909) + 6377563.396, 6356256.909 + ) if no_offsets: - del gridvar_props['false_easting'] - del gridvar_props['false_northing'] + del gridvar_props["false_easting"] + del gridvar_props["false_northing"] test_easting = 0 test_northing = 0 @@ -58,7 +61,8 @@ def _test(self, inverse_flattening=False, no_offsets=False): central_lon=cf_grid_var.longitude_of_projection_origin, false_easting=test_easting, false_northing=test_northing, - ellipsoid=expected_ellipsoid) + ellipsoid=expected_ellipsoid, + ) self.assertEqual(cs, expected) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_transverse_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py similarity index 68% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_transverse_mercator_coordinate_system.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py index 7487168fba..0096c5df4b 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_transverse_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_transverse_mercator_coordinate_system`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_transverse_mercator_coordinate_system`. 
""" @@ -17,8 +17,9 @@ import iris from iris.coord_systems import TransverseMercator -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_transverse_mercator_coordinate_system +from iris.fileformats._nc_load_rules.helpers import ( + build_transverse_mercator_coordinate_system, +) class TestBuildTransverseMercatorCoordinateSystem(tests.IrisTest): @@ -32,22 +33,24 @@ def _test(self, inverse_flattening=False, no_options=False): false_easting=test_easting, false_northing=test_northing, scale_factor_at_central_meridian=test_scale_factor, - semi_major_axis=6377563.396) + semi_major_axis=6377563.396, + ) if inverse_flattening: - gridvar_props['inverse_flattening'] = 299.3249646 + gridvar_props["inverse_flattening"] = 299.3249646 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, - inverse_flattening=299.3249646) + 6377563.396, inverse_flattening=299.3249646 + ) else: - gridvar_props['semi_minor_axis'] = 6356256.909 + gridvar_props["semi_minor_axis"] = 6356256.909 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909) + 6377563.396, 6356256.909 + ) if no_options: - del gridvar_props['false_easting'] - del gridvar_props['false_northing'] - del gridvar_props['scale_factor_at_central_meridian'] + del gridvar_props["false_easting"] + del gridvar_props["false_northing"] + del gridvar_props["scale_factor_at_central_meridian"] test_easting = 0 test_northing = 0 test_scale_factor = 1.0 @@ -58,13 +61,16 @@ def _test(self, inverse_flattening=False, no_options=False): expected = TransverseMercator( latitude_of_projection_origin=( - cf_grid_var.latitude_of_projection_origin), + cf_grid_var.latitude_of_projection_origin + ), longitude_of_central_meridian=( - cf_grid_var.longitude_of_central_meridian), + cf_grid_var.longitude_of_central_meridian + ), false_easting=test_easting, false_northing=test_northing, scale_factor_at_central_meridian=test_scale_factor, - ellipsoid=expected_ellipsoid) + ellipsoid=expected_ellipsoid, + ) self.assertEqual(cs, expected) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_verticalp_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py similarity index 63% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_verticalp_coordinate_system.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py index 588b82fd99..f34992c2be 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_verticalp_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_vertical_perspective_coordinate_system`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_vertical_perspective_coordinate_system`. 
""" @@ -17,8 +17,9 @@ import iris from iris.coord_systems import VerticalPerspective -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_vertical_perspective_coordinate_system +from iris.fileformats._nc_load_rules.helpers import ( + build_vertical_perspective_coordinate_system, +) class TestBuildVerticalPerspectiveCoordinateSystem(tests.IrisTest): @@ -30,24 +31,25 @@ def _test(self, inverse_flattening=False, no_offsets=False): test_easting = 100.0 test_northing = 200.0 cf_grid_var_kwargs = { - 'spec': [], - 'latitude_of_projection_origin': 1.0, - 'longitude_of_projection_origin': 2.0, - 'perspective_point_height': 2000000.0, - 'false_easting': test_easting, - 'false_northing': test_northing, - 'semi_major_axis': 6377563.396} - - ellipsoid_kwargs = {'semi_major_axis': 6377563.396} + "spec": [], + "latitude_of_projection_origin": 1.0, + "longitude_of_projection_origin": 2.0, + "perspective_point_height": 2000000.0, + "false_easting": test_easting, + "false_northing": test_northing, + "semi_major_axis": 6377563.396, + } + + ellipsoid_kwargs = {"semi_major_axis": 6377563.396} if inverse_flattening: - ellipsoid_kwargs['inverse_flattening'] = 299.3249646 + ellipsoid_kwargs["inverse_flattening"] = 299.3249646 else: - ellipsoid_kwargs['semi_minor_axis'] = 6356256.909 + ellipsoid_kwargs["semi_minor_axis"] = 6356256.909 cf_grid_var_kwargs.update(ellipsoid_kwargs) if no_offsets: - del cf_grid_var_kwargs['false_easting'] - del cf_grid_var_kwargs['false_northing'] + del cf_grid_var_kwargs["false_easting"] + del cf_grid_var_kwargs["false_northing"] test_easting = 0 test_northing = 0 @@ -56,14 +58,13 @@ def _test(self, inverse_flattening=False, no_offsets=False): cs = build_vertical_perspective_coordinate_system(None, cf_grid_var) expected = VerticalPerspective( - latitude_of_projection_origin=cf_grid_var. - latitude_of_projection_origin, - longitude_of_projection_origin=cf_grid_var. - longitude_of_projection_origin, + latitude_of_projection_origin=cf_grid_var.latitude_of_projection_origin, + longitude_of_projection_origin=cf_grid_var.longitude_of_projection_origin, perspective_point_height=cf_grid_var.perspective_point_height, false_easting=test_easting, false_northing=test_northing, - ellipsoid=ellipsoid) + ellipsoid=ellipsoid, + ) self.assertEqual(cs, expected) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_attr_units.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py similarity index 73% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_attr_units.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py index 2e493cdecc..a159ef81a8 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_attr_units.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_cube_metadata`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +get_attr_units`. 
""" @@ -17,8 +17,7 @@ import numpy as np -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - get_attr_units +from iris.fileformats._nc_load_rules.helpers import get_attr_units class TestGetAttrUnits(tests.IrisTest): @@ -30,22 +29,23 @@ def _make_cf_var(global_attributes=None): cf_group = mock.Mock(global_attributes=global_attributes) cf_var = mock.MagicMock( - cf_name='sound_frequency', + cf_name="sound_frequency", cf_data=mock.Mock(spec=[]), standard_name=None, long_name=None, - units=u'\u266b', + units="\u266b", dtype=np.float64, cell_methods=None, - cf_group=cf_group) + cf_group=cf_group, + ) return cf_var def test_unicode_character(self): attributes = {} - expected_attributes = {'invalid_units': u'\u266b'} + expected_attributes = {"invalid_units": "\u266b"} cf_var = self._make_cf_var() attr_units = get_attr_units(cf_var, attributes) - self.assertEqual(attr_units, '?') + self.assertEqual(attr_units, "?") self.assertEqual(attributes, expected_attributes) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_cf_bounds_var.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py similarity index 78% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_cf_bounds_var.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py index 26837b630d..ff9c51f40b 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_cf_bounds_var.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.get_cf_bounds_var`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +get_cf_bounds_var`. 
""" @@ -15,8 +15,11 @@ from unittest import mock -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - get_cf_bounds_var, CF_ATTR_BOUNDS, CF_ATTR_CLIMATOLOGY +from iris.fileformats._nc_load_rules.helpers import ( + CF_ATTR_BOUNDS, + CF_ATTR_CLIMATOLOGY, + get_cf_bounds_var, +) class TestGetCFBoundsVar(tests.IrisTest): @@ -25,7 +28,7 @@ class TestGetCFBoundsVar(tests.IrisTest): def _generic_test(self, test_climatological_bounds=False): cf_coord_var = mock.MagicMock() - cf_group_dict = {'TEST': mock.sentinel.bounds_var} + cf_group_dict = {"TEST": mock.sentinel.bounds_var} if test_climatological_bounds: cf_coord_var.cf_group.climatology = cf_group_dict test_attr = CF_ATTR_CLIMATOLOGY @@ -34,7 +37,7 @@ def _generic_test(self, test_climatological_bounds=False): test_attr = CF_ATTR_BOUNDS for attr in (CF_ATTR_BOUNDS, CF_ATTR_CLIMATOLOGY): - attr_val = 'TEST' if attr == test_attr else None + attr_val = "TEST" if attr == test_attr else None setattr(cf_coord_var, attr, attr_val) bounds_var, climatological = get_cf_bounds_var(cf_coord_var) @@ -48,5 +51,5 @@ def test_bounds_climatological(self): self._generic_test(test_climatological_bounds=True) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_names.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py similarity index 61% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_names.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py index a8e833cde9..3c7c496b54 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_names.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.get_names`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +get_names`. """ @@ -17,7 +17,7 @@ import numpy as np -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import get_names +from iris.fileformats._nc_load_rules.helpers import get_names class TestGetNames(tests.IrisTest): @@ -34,16 +34,18 @@ class TestGetNames(tests.IrisTest): standard_name. """ + @staticmethod def _make_cf_var(standard_name, long_name, cf_name): cf_var = mock.Mock( cf_name=cf_name, standard_name=standard_name, long_name=long_name, - units='degrees', + units="degrees", dtype=np.float64, cell_methods=None, - cf_group=mock.Mock(global_attributes={})) + cf_group=mock.Mock(global_attributes={}), + ) return cf_var def check_names(self, inputs, expected): @@ -53,11 +55,13 @@ def check_names(self, inputs, expected): # Expected - The expected names and attributes. exp_std_name, exp_long_name, exp_var_name, exp_attributes = expected - cf_var = self._make_cf_var(standard_name=standard_name, - long_name=long_name, cf_name=var_name) + cf_var = self._make_cf_var( + standard_name=standard_name, long_name=long_name, cf_name=var_name + ) attributes = {} res_standard_name, res_long_name, res_var_name = get_names( - cf_var, coord_name, attributes) + cf_var, coord_name, attributes + ) # Check the names and attributes are as expected. 
self.assertEqual(res_standard_name, exp_std_name) @@ -67,177 +71,220 @@ def check_names(self, inputs, expected): def test_var_name_valid(self): # Only var_name is set and it is set to a valid standard name. - inp = (None, None, 'grid_latitude', None) - exp = ('grid_latitude', None, 'grid_latitude', {}) + inp = (None, None, "grid_latitude", None) + exp = ("grid_latitude", None, "grid_latitude", {}) self.check_names(inp, exp) def test_var_name_valid_coord_name_set(self): # var_name is a valid standard name, coord_name is also set. - inp = (None, None, 'grid_latitude', 'latitude') - exp = ('latitude', None, 'grid_latitude', {}) + inp = (None, None, "grid_latitude", "latitude") + exp = ("latitude", None, "grid_latitude", {}) self.check_names(inp, exp) def test_var_name_invalid(self): # Only var_name is set but it is not a valid standard name. - inp = (None, None, 'lat_var_name', None) - exp = (None, None, 'lat_var_name', {}) + inp = (None, None, "lat_var_name", None) + exp = (None, None, "lat_var_name", {}) self.check_names(inp, exp) def test_var_name_invalid_coord_name_set(self): # var_name is not a valid standard name, the coord_name is also set. - inp = (None, None, 'lat_var_name', 'latitude') - exp = ('latitude', None, 'lat_var_name', {}) + inp = (None, None, "lat_var_name", "latitude") + exp = ("latitude", None, "lat_var_name", {}) self.check_names(inp, exp) def test_long_name_set_var_name_valid(self): # long_name is not None, var_name is set to a valid standard name. - inp = (None, 'lat_long_name', 'grid_latitude', None) - exp = ('grid_latitude', 'lat_long_name', 'grid_latitude', {}) + inp = (None, "lat_long_name", "grid_latitude", None) + exp = ("grid_latitude", "lat_long_name", "grid_latitude", {}) self.check_names(inp, exp) def test_long_name_set_var_name_valid_coord_name_set(self): # long_name is not None, var_name is set to a valid standard name, and # coord_name is set. - inp = (None, 'lat_long_name', 'grid_latitude', 'latitude') - exp = ('latitude', 'lat_long_name', 'grid_latitude', {}) + inp = (None, "lat_long_name", "grid_latitude", "latitude") + exp = ("latitude", "lat_long_name", "grid_latitude", {}) self.check_names(inp, exp) def test_long_name_set_var_name_invalid(self): # long_name is not None, var_name is not set to a valid standard name. - inp = (None, 'lat_long_name', 'lat_var_name', None) - exp = (None, 'lat_long_name', 'lat_var_name', {}) + inp = (None, "lat_long_name", "lat_var_name", None) + exp = (None, "lat_long_name", "lat_var_name", {}) self.check_names(inp, exp) def test_long_name_set_var_name_invalid_coord_name_set(self): # long_name is not None, var_name is not set to a valid standard name, # and coord_name is set. - inp = (None, 'lat_long_name', 'lat_var_name', 'latitude') - exp = ('latitude', 'lat_long_name', 'lat_var_name', {}) + inp = (None, "lat_long_name", "lat_var_name", "latitude") + exp = ("latitude", "lat_long_name", "lat_var_name", {}) self.check_names(inp, exp) def test_std_name_valid_var_name_valid(self): # standard_name is a valid standard name, var_name is a valid standard # name. - inp = ('projection_y_coordinate', None, 'grid_latitude', None) - exp = ('projection_y_coordinate', None, 'grid_latitude', {}) + inp = ("projection_y_coordinate", None, "grid_latitude", None) + exp = ("projection_y_coordinate", None, "grid_latitude", {}) self.check_names(inp, exp) def test_std_name_valid_var_name_valid_coord_name_set(self): # standard_name is a valid standard name, var_name is a valid standard # name, coord_name is set. 
- inp = ('projection_y_coordinate', None, 'grid_latitude', 'latitude') - exp = ('projection_y_coordinate', None, 'grid_latitude', {}) + inp = ("projection_y_coordinate", None, "grid_latitude", "latitude") + exp = ("projection_y_coordinate", None, "grid_latitude", {}) self.check_names(inp, exp) def test_std_name_valid_var_name_invalid(self): # standard_name is a valid standard name, var_name is not a valid # standard name. - inp = ('projection_y_coordinate', None, 'lat_var_name', None) - exp = ('projection_y_coordinate', None, 'lat_var_name', {}) + inp = ("projection_y_coordinate", None, "lat_var_name", None) + exp = ("projection_y_coordinate", None, "lat_var_name", {}) self.check_names(inp, exp) def test_std_name_valid_var_name_invalid_coord_name_set(self): # standard_name is a valid standard name, var_name is not a valid # standard name, coord_name is set. - inp = ('projection_y_coordinate', None, 'lat_var_name', 'latitude') - exp = ('projection_y_coordinate', None, 'lat_var_name', {}) + inp = ("projection_y_coordinate", None, "lat_var_name", "latitude") + exp = ("projection_y_coordinate", None, "lat_var_name", {}) self.check_names(inp, exp) def test_std_name_valid_long_name_set_var_name_valid(self): # standard_name is a valid standard name, long_name is not None, # var_name is a valid standard name. - inp = ('projection_y_coordinate', 'lat_long_name', 'grid_latitude', - None) - exp = ('projection_y_coordinate', 'lat_long_name', 'grid_latitude', {}) + inp = ( + "projection_y_coordinate", + "lat_long_name", + "grid_latitude", + None, + ) + exp = ("projection_y_coordinate", "lat_long_name", "grid_latitude", {}) self.check_names(inp, exp) def test_std_name_valid_long_name_set_var_name_valid_coord_name_set(self): # standard_name is a valid standard name, long_name is not None, # var_name is a valid standard name, coord_name is set. - inp = ('projection_y_coordinate', 'lat_long_name', 'grid_latitude', - 'latitude') - exp = ('projection_y_coordinate', 'lat_long_name', 'grid_latitude', {}) + inp = ( + "projection_y_coordinate", + "lat_long_name", + "grid_latitude", + "latitude", + ) + exp = ("projection_y_coordinate", "lat_long_name", "grid_latitude", {}) self.check_names(inp, exp) def test_std_name_valid_long_name_set_var_name_invalid(self): # standard_name is a valid standard name, long_name is not None, # var_name is not a valid standard name. - inp = ('projection_y_coordinate', 'lat_long_name', 'lat_var_name', - None) - exp = ('projection_y_coordinate', 'lat_long_name', 'lat_var_name', {}) + inp = ( + "projection_y_coordinate", + "lat_long_name", + "lat_var_name", + None, + ) + exp = ("projection_y_coordinate", "lat_long_name", "lat_var_name", {}) self.check_names(inp, exp) def test_std_name_valid_long_name_set_var_name_invalid_coord_name_set( - self): + self, + ): # standard_name is a valid standard name, long_name is not None, # var_name is not a valid standard name, coord_name is set. - inp = ('projection_y_coordinate', 'lat_long_name', 'lat_var_name', - 'latitude') - exp = ('projection_y_coordinate', 'lat_long_name', 'lat_var_name', {}) + inp = ( + "projection_y_coordinate", + "lat_long_name", + "lat_var_name", + "latitude", + ) + exp = ("projection_y_coordinate", "lat_long_name", "lat_var_name", {}) self.check_names(inp, exp) def test_std_name_invalid_var_name_valid(self): # standard_name is not a valid standard name, var_name is a valid # standard name. 
- inp = ('latitude_coord', None, 'grid_latitude', None) - exp = ('grid_latitude', None, 'grid_latitude', {}) + inp = ("latitude_coord", None, "grid_latitude", None) + exp = ("grid_latitude", None, "grid_latitude", {}) self.check_names(inp, exp) def test_std_name_invalid_var_name_valid_coord_name_set(self): # standard_name is not a valid standard name, var_name is a valid # standard name, coord_name is set. - inp = ('latitude_coord', None, 'grid_latitude', 'latitude') - exp = ('latitude', None, 'grid_latitude', - {'invalid_standard_name': 'latitude_coord'}) + inp = ("latitude_coord", None, "grid_latitude", "latitude") + exp = ( + "latitude", + None, + "grid_latitude", + {"invalid_standard_name": "latitude_coord"}, + ) self.check_names(inp, exp) def test_std_name_invalid_var_name_invalid(self): # standard_name is not a valid standard name, var_name is not a valid # standard name. - inp = ('latitude_coord', None, 'lat_var_name', None) - exp = (None, None, 'lat_var_name', {}) + inp = ("latitude_coord", None, "lat_var_name", None) + exp = (None, None, "lat_var_name", {}) self.check_names(inp, exp) def test_std_name_invalid_var_name_invalid_coord_name_set(self): # standard_name is not a valid standard name, var_name is not a valid # standard name, coord_name is set. - inp = ('latitude_coord', None, 'lat_var_name', 'latitude') - exp = ('latitude', None, 'lat_var_name', - {'invalid_standard_name': 'latitude_coord'}) + inp = ("latitude_coord", None, "lat_var_name", "latitude") + exp = ( + "latitude", + None, + "lat_var_name", + {"invalid_standard_name": "latitude_coord"}, + ) self.check_names(inp, exp) def test_std_name_invalid_long_name_set_var_name_valid(self): # standard_name is not a valid standard name, long_name is not None # var_name is a valid standard name. - inp = ('latitude_coord', 'lat_long_name', 'grid_latitude', None) - exp = ('grid_latitude', 'lat_long_name', 'grid_latitude', - {'invalid_standard_name': 'latitude_coord'}) + inp = ("latitude_coord", "lat_long_name", "grid_latitude", None) + exp = ( + "grid_latitude", + "lat_long_name", + "grid_latitude", + {"invalid_standard_name": "latitude_coord"}, + ) self.check_names(inp, exp) def test_std_name_invalid_long_name_set_var_name_valid_coord_name_set( - self): + self, + ): # standard_name is not a valid standard name, long_name is not None, # var_name is a valid standard name, coord_name is set. - inp = ('latitude_coord', 'lat_long_name', 'grid_latitude', 'latitude') - exp = ('latitude', 'lat_long_name', 'grid_latitude', - {'invalid_standard_name': 'latitude_coord'}) + inp = ("latitude_coord", "lat_long_name", "grid_latitude", "latitude") + exp = ( + "latitude", + "lat_long_name", + "grid_latitude", + {"invalid_standard_name": "latitude_coord"}, + ) self.check_names(inp, exp) def test_std_name_invalid_long_name_set_var_name_invalid(self): # standard_name is not a valid standard name, long_name is not None # var_name is not a valid standard name. - inp = ('latitude_coord', 'lat_long_name', 'lat_var_name', None) - exp = (None, 'lat_long_name', 'lat_var_name', - {'invalid_standard_name': 'latitude_coord'}) + inp = ("latitude_coord", "lat_long_name", "lat_var_name", None) + exp = ( + None, + "lat_long_name", + "lat_var_name", + {"invalid_standard_name": "latitude_coord"}, + ) self.check_names(inp, exp) def test_std_name_invalid_long_name_set_var_name_invalid_coord_name_set( - self): + self, + ): # standard_name is not a valid standard name, long_name is not None, # var_name is not a valid standard name, coord_name is set. 
- inp = ('latitude_coord', 'lat_long_name', 'lat_var_name', 'latitude') - exp = ('latitude', 'lat_long_name', 'lat_var_name', - {'invalid_standard_name': 'latitude_coord'}) + inp = ("latitude_coord", "lat_long_name", "lat_var_name", "latitude") + exp = ( + "latitude", + "lat_long_name", + "lat_var_name", + {"invalid_standard_name": "latitude_coord"}, + ) self.check_names(inp, exp) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_mercator_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py similarity index 82% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_mercator_parameters.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py index 2e1d315de4..dfe2895f29 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_mercator_parameters.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py @@ -4,22 +4,22 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.has_supported_mercator_parameters`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +has_supported_mercator_parameters`. """ +from unittest import mock import warnings +from iris.fileformats._nc_load_rules.helpers import ( + has_supported_mercator_parameters, +) + # import iris tests first so that some things can be initialised before # importing anything else import iris.tests as tests # isort:skip -from unittest import mock - -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - has_supported_mercator_parameters - def _engine(cf_grid_var, cf_name): cf_group = {cf_name: cf_grid_var} @@ -28,9 +28,8 @@ def _engine(cf_grid_var, cf_name): class TestHasSupportedMercatorParameters(tests.IrisTest): - def test_valid(self): - cf_name = 'mercator' + cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], longitude_of_projection_origin=-90, @@ -38,7 +37,8 @@ def test_valid(self): false_northing=0, scale_factor_at_projection_origin=1, semi_major_axis=6377563.396, - semi_minor_axis=6356256.909) + semi_minor_axis=6356256.909, + ) engine = _engine(cf_grid_var, cf_name) is_valid = has_supported_mercator_parameters(engine, cf_name) @@ -48,7 +48,7 @@ def test_valid(self): def test_invalid_scale_factor(self): # Iris does not yet support scale factors other than one for # Mercator projections - cf_name = 'mercator' + cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], longitude_of_projection_origin=0, @@ -56,7 +56,8 @@ def test_invalid_scale_factor(self): false_northing=0, scale_factor_at_projection_origin=0.9, semi_major_axis=6377563.396, - semi_minor_axis=6356256.909) + semi_minor_axis=6356256.909, + ) engine = _engine(cf_grid_var, cf_name) with warnings.catch_warnings(record=True) as warns: @@ -65,12 +66,12 @@ def test_invalid_scale_factor(self): self.assertFalse(is_valid) self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), 'Scale factor') + self.assertRegex(str(warns[0]), "Scale factor") def test_invalid_standard_parallel(self): # Iris does not yet support standard parallels other than zero for # Mercator projections - cf_name = 'mercator' + cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], longitude_of_projection_origin=0, @@ -78,7 +79,8 @@ def 
test_invalid_standard_parallel(self): false_northing=0, standard_parallel=30, semi_major_axis=6377563.396, - semi_minor_axis=6356256.909) + semi_minor_axis=6356256.909, + ) engine = _engine(cf_grid_var, cf_name) with warnings.catch_warnings(record=True) as warns: @@ -87,12 +89,12 @@ def test_invalid_standard_parallel(self): self.assertFalse(is_valid) self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), 'Standard parallel') + self.assertRegex(str(warns[0]), "Standard parallel") def test_invalid_false_easting(self): # Iris does not yet support false eastings other than zero for # Mercator projections - cf_name = 'mercator' + cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], longitude_of_projection_origin=0, @@ -100,7 +102,8 @@ def test_invalid_false_easting(self): false_northing=0, scale_factor_at_projection_origin=1, semi_major_axis=6377563.396, - semi_minor_axis=6356256.909) + semi_minor_axis=6356256.909, + ) engine = _engine(cf_grid_var, cf_name) with warnings.catch_warnings(record=True) as warns: @@ -109,12 +112,12 @@ def test_invalid_false_easting(self): self.assertFalse(is_valid) self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), 'False easting') + self.assertRegex(str(warns[0]), "False easting") def test_invalid_false_northing(self): # Iris does not yet support false northings other than zero for # Mercator projections - cf_name = 'mercator' + cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], longitude_of_projection_origin=0, @@ -122,7 +125,8 @@ def test_invalid_false_northing(self): false_northing=100, scale_factor_at_projection_origin=1, semi_major_axis=6377563.396, - semi_minor_axis=6356256.909) + semi_minor_axis=6356256.909, + ) engine = _engine(cf_grid_var, cf_name) with warnings.catch_warnings(record=True) as warns: @@ -131,7 +135,7 @@ def test_invalid_false_northing(self): self.assertFalse(is_valid) self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), 'False northing') + self.assertRegex(str(warns[0]), "False northing") if __name__ == "__main__": diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_stereographic_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_stereographic_parameters.py similarity index 81% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_stereographic_parameters.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_stereographic_parameters.py index fd588b6fcb..8bec823f4b 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_stereographic_parameters.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_stereographic_parameters.py @@ -4,22 +4,22 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.has_supported_stereographic_parameters`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +has_supported_stereographic_parameters`. 
""" +from unittest import mock import warnings +from iris.fileformats._nc_load_rules.helpers import ( + has_supported_stereographic_parameters, +) + # import iris tests first so that some things can be initialised before # importing anything else import iris.tests as tests # isort:skip -from unittest import mock - -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - has_supported_stereographic_parameters - def _engine(cf_grid_var, cf_name): cf_group = {cf_name: cf_grid_var} @@ -29,7 +29,7 @@ def _engine(cf_grid_var, cf_name): class TestHasSupportedStereographicParameters(tests.IrisTest): def test_valid(self): - cf_name = 'stereographic' + cf_name = "stereographic" cf_grid_var = mock.Mock( spec=[], latitude_of_projection_origin=0, @@ -38,7 +38,8 @@ def test_valid(self): false_northing=200, scale_factor_at_projection_origin=1, semi_major_axis=6377563.396, - semi_minor_axis=6356256.909) + semi_minor_axis=6356256.909, + ) engine = _engine(cf_grid_var, cf_name) is_valid = has_supported_stereographic_parameters(engine, cf_name) @@ -48,7 +49,7 @@ def test_valid(self): def test_invalid_scale_factor(self): # Iris does not yet support scale factors other than one for # stereographic projections - cf_name = 'stereographic' + cf_name = "stereographic" cf_grid_var = mock.Mock( spec=[], latitude_of_projection_origin=0, @@ -57,7 +58,8 @@ def test_invalid_scale_factor(self): false_northing=200, scale_factor_at_projection_origin=0.9, semi_major_axis=6377563.396, - semi_minor_axis=6356256.909) + semi_minor_axis=6356256.909, + ) engine = _engine(cf_grid_var, cf_name) with warnings.catch_warnings(record=True) as warns: @@ -66,7 +68,7 @@ def test_invalid_scale_factor(self): self.assertFalse(is_valid) self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), 'Scale factor') + self.assertRegex(str(warns[0]), "Scale factor") if __name__ == "__main__": diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_reorder_bounds_data.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py similarity index 60% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_reorder_bounds_data.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py index 83843cf782..1ee0cfbf2e 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_reorder_bounds_data.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.reorder_bounds_data`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +reorder_bounds_data`. 
""" @@ -17,16 +17,16 @@ import numpy as np -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - reorder_bounds_data +from iris.fileformats._nc_load_rules.helpers import reorder_bounds_data class Test(tests.IrisTest): def test_fastest_varying(self): bounds_data = np.arange(24).reshape(2, 3, 4) - cf_bounds_var = mock.Mock(dimensions=('foo', 'bar', 'nv'), - cf_name='wibble_bnds') - cf_coord_var = mock.Mock(dimensions=('foo', 'bar')) + cf_bounds_var = mock.Mock( + dimensions=("foo", "bar", "nv"), cf_name="wibble_bnds" + ) + cf_coord_var = mock.Mock(dimensions=("foo", "bar")) res = reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var) # Vertex dimension (nv) is already at the end. @@ -34,8 +34,8 @@ def test_fastest_varying(self): def test_slowest_varying(self): bounds_data = np.arange(24).reshape(4, 2, 3) - cf_bounds_var = mock.Mock(dimensions=('nv', 'foo', 'bar')) - cf_coord_var = mock.Mock(dimensions=('foo', 'bar')) + cf_bounds_var = mock.Mock(dimensions=("nv", "foo", "bar")) + cf_coord_var = mock.Mock(dimensions=("foo", "bar")) res = reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var) # Move zeroth dimension (nv) to the end. @@ -44,12 +44,13 @@ def test_slowest_varying(self): def test_different_dim_names(self): bounds_data = np.arange(24).reshape(2, 3, 4) - cf_bounds_var = mock.Mock(dimensions=('foo', 'bar', 'nv'), - cf_name='wibble_bnds') - cf_coord_var = mock.Mock(dimensions=('x', 'y'), cf_name='wibble') - with self.assertRaisesRegex(ValueError, 'dimension names'): + cf_bounds_var = mock.Mock( + dimensions=("foo", "bar", "nv"), cf_name="wibble_bnds" + ) + cf_coord_var = mock.Mock(dimensions=("x", "y"), cf_name="wibble") + with self.assertRaisesRegex(ValueError, "dimension names"): reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/pyproject.toml b/pyproject.toml index 58ce1daba1..0a672b86bb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,6 @@ [build-system] # Defined by PEP 518 requires = [ - "scitools-pyke", "setuptools>=40.8.0", "wheel", ] @@ -16,7 +15,6 @@ include = '\.pyi?$' extend-exclude = ''' ( /( - | pyke_rules | sphinxext | tools )/ @@ -32,8 +30,6 @@ line_length = 79 profile = "black" extend_skip = [ "_build", - "compiled_krb", - "fc_rules_cf.krb", "generated", "sphinxext", "tools", diff --git a/requirements/ci/nox.lock/py37-linux-64.lock b/requirements/ci/nox.lock/py37-linux-64.lock index 016a9ebb69..b1cc74adba 100644 --- a/requirements/ci/nox.lock/py37-linux-64.lock +++ b/requirements/ci/nox.lock/py37-linux-64.lock @@ -1,5 +1,5 @@ # platform: linux-64 -# env_hash: 846d5ea3acab5e11a9cd84738a73737ed3b365db07cc9b6825611e23c6db0e3d +# env_hash: 6c711a9771dd36da4e85fae63fd584e6a1bba39d304d13e747db86a4f861804e @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2021.5.30-ha878542_0.tar.bz2#6a777890e94194dc94a29a76d2a7e721 @@ -125,7 +125,6 @@ https://conda.anaconda.org/conda-forge/noarch/nose-1.3.7-py_1006.tar.bz2#382019d https://conda.anaconda.org/conda-forge/noarch/olefile-0.46-pyh9f0ad1d_1.tar.bz2#0b2e68acc8c78c8cc392b90983481f58 https://conda.anaconda.org/conda-forge/linux-64/proj-7.2.0-h277dcde_2.tar.bz2#db654ee11298d3463bad67445707654c https://conda.anaconda.org/conda-forge/noarch/pycparser-2.20-pyh9f0ad1d_2.tar.bz2#aa798d50ffd182a0f6f31478c7f434f6 
-https://conda.anaconda.org/conda-forge/noarch/pyke-1.1.1-pyhd8ed1ab_1004.tar.bz2#5f0236abfbb6d53826d1afed1e64f82e https://conda.anaconda.org/conda-forge/noarch/pyparsing-2.4.7-pyh9f0ad1d_0.tar.bz2#626c4f20d5bf06dcec9cf2eaa31725c7 https://conda.anaconda.org/conda-forge/noarch/pyshp-2.1.3-pyh44b312d_0.tar.bz2#2d1867b980785eb44b8122184d8b42a6 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.7-2_cp37m.tar.bz2#afff88bf9a7048da740c70aeb8cdbb82 @@ -157,7 +156,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.11.0-py37h5e8e339_3.ta https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h48d8840_2.tar.bz2#eba672c69baf366fdedd1c6f702dbb81 https://conda.anaconda.org/conda-forge/linux-64/docutils-0.16-py37h89c1867_3.tar.bz2#3da23bcf1d502670cec18fd3a04f409b https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-2.8.1-h83ec7ef_0.tar.bz2#654935b08e8bd4a8cbf6a4253e290c04 -https://conda.anaconda.org/conda-forge/linux-64/importlib-metadata-4.5.0-py37h89c1867_0.tar.bz2#71a9d20403f28d15f7a94d0817584efa +https://conda.anaconda.org/conda-forge/linux-64/importlib-metadata-4.6.0-py37h89c1867_0.tar.bz2#6d600925b3ec1d7bf9517eacfa839bd0 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.3.1-py37h2527ec5_1.tar.bz2#61149814e0ea71cb5b44881c65d25f7b https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.0-mpi_mpich_hf07302c_2.tar.bz2#d76a3f327eb8e26b5ce6b042ac1abeb3 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.0.1-py37h5e8e339_0.tar.bz2#90ad307f6997784664de956e09ec689e @@ -182,7 +181,7 @@ https://conda.anaconda.org/conda-forge/linux-64/click-8.0.1-py37h89c1867_0.tar.b https://conda.anaconda.org/conda-forge/linux-64/cryptography-3.4.7-py37h5d9358c_0.tar.bz2#d811fb6a96ae0cf8c0a17457a8e67ff4 https://conda.anaconda.org/conda-forge/noarch/dask-core-2021.6.2-pyhd8ed1ab_0.tar.bz2#a5a365e004f7cb59d652254800cc40b7 https://conda.anaconda.org/conda-forge/linux-64/editdistance-s-1.0.0-py37h2527ec5_1.tar.bz2#100918f43247cedad74f2cf8dcbda5bc -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-4.5.0-hd8ed1ab_0.tar.bz2#37284dc55911fdf9b0b5e6fed56fb192 +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-4.6.0-hd8ed1ab_0.tar.bz2#bb345f822c508e2bc5138c975667256c https://conda.anaconda.org/conda-forge/noarch/jinja2-3.0.1-pyhd8ed1ab_0.tar.bz2#c647e77921fd3e245cdcc5b2d451a0f8 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py37h902c9e0_1005.tar.bz2#40db532422636dd1e980154114486a00 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.5.3-mpi_mpich_h196b126_4.tar.bz2#e058f42a78ea8c965cf7335e28143c59 @@ -191,7 +190,7 @@ https://conda.anaconda.org/conda-forge/linux-64/pango-1.48.5-hb8ff022_0.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.1.1-py37h6f94858_1004.tar.bz2#42b37830a63405589fef3d13db505e7d https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.1.1-py37h902c9e0_3.tar.bz2#104648a5a091a493046a62704eef5c49 https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-hda022c4_4.tar.bz2#afebab1f5049d66baaaec67d9ce893f0 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.6.3-py37h29e03ee_0.tar.bz2#a469d02f72b9cef07f4408d419b17dcc +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.7.0-py37h29e03ee_0.tar.bz2#685172ce967c6877e22c1a8907366267 https://conda.anaconda.org/conda-forge/linux-64/setuptools-49.6.0-py37h89c1867_3.tar.bz2#928c178bf6805b8ab71fabaa620e0234 https://conda.anaconda.org/conda-forge/linux-64/shapely-1.7.1-py37h2d1e849_5.tar.bz2#451beb59aca4c165e68fbe8be3a37149 
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 @@ -208,7 +207,7 @@ https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.50.7-hc3c00ef_0.tar.bz https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.4.2-py37hdd32ed1_0.tar.bz2#ee755b80aae171058a46c5d7badd08ff https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.5.7-nompi_py37h946d57d_100.tar.bz2#217487caeb2c4cecb25f86d99cbe53b6 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.6.0-pyhd8ed1ab_0.tar.bz2#0941325bf48969e2b3b19d0951740950 -https://conda.anaconda.org/conda-forge/noarch/pip-21.1.2-pyhd8ed1ab_0.tar.bz2#dbd830edaffe5fc9ae6c1d425db2b5f2 +https://conda.anaconda.org/conda-forge/noarch/pip-21.1.3-pyhd8ed1ab_0.tar.bz2#231bd0af116f55ca4d17ea0869415fdf https://conda.anaconda.org/conda-forge/noarch/pygments-2.9.0-pyhd8ed1ab_0.tar.bz2#a2d9bba43c9b80a42b0ccb9afd7223c2 https://conda.anaconda.org/conda-forge/noarch/pyopenssl-20.0.1-pyhd8ed1ab_0.tar.bz2#92371c25994d0f5d28a01c1fb75ebf86 https://conda.anaconda.org/conda-forge/linux-64/pyqt-impl-5.12.3-py37he336c9b_7.tar.bz2#303251d6f2b9e60a0cd79480cf8507d2 diff --git a/requirements/ci/nox.lock/py38-linux-64.lock b/requirements/ci/nox.lock/py38-linux-64.lock index a6ad914b71..55ef7d7723 100644 --- a/requirements/ci/nox.lock/py38-linux-64.lock +++ b/requirements/ci/nox.lock/py38-linux-64.lock @@ -1,5 +1,5 @@ # platform: linux-64 -# env_hash: 35b1c159ac1a6d931e48b613bf6f328700533ebf5f968ea86bcf1fd4e43c777a +# env_hash: fbb4556337a9f497fb9d021a1e2d2e8eae9f52dfc1d7859ee0f63d66e1842b0c @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2021.5.30-ha878542_0.tar.bz2#6a777890e94194dc94a29a76d2a7e721 @@ -124,7 +124,6 @@ https://conda.anaconda.org/conda-forge/noarch/nose-1.3.7-py_1006.tar.bz2#382019d https://conda.anaconda.org/conda-forge/noarch/olefile-0.46-pyh9f0ad1d_1.tar.bz2#0b2e68acc8c78c8cc392b90983481f58 https://conda.anaconda.org/conda-forge/linux-64/proj-7.2.0-h277dcde_2.tar.bz2#db654ee11298d3463bad67445707654c https://conda.anaconda.org/conda-forge/noarch/pycparser-2.20-pyh9f0ad1d_2.tar.bz2#aa798d50ffd182a0f6f31478c7f434f6 -https://conda.anaconda.org/conda-forge/noarch/pyke-1.1.1-pyhd8ed1ab_1004.tar.bz2#5f0236abfbb6d53826d1afed1e64f82e https://conda.anaconda.org/conda-forge/noarch/pyparsing-2.4.7-pyh9f0ad1d_0.tar.bz2#626c4f20d5bf06dcec9cf2eaa31725c7 https://conda.anaconda.org/conda-forge/noarch/pyshp-2.1.3-pyh44b312d_0.tar.bz2#2d1867b980785eb44b8122184d8b42a6 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.8-2_cp38.tar.bz2#bfbb29d517281e78ac53e48d21e6e860 @@ -186,7 +185,7 @@ https://conda.anaconda.org/conda-forge/linux-64/pango-1.48.5-hb8ff022_0.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py38hb5d20a5_0.tar.bz2#cc6852249c01884469560082943b689f https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.1.1-py38h5c078b8_3.tar.bz2#dafeef887e68bd18ec84681747ca0fd5 https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-hda022c4_4.tar.bz2#afebab1f5049d66baaaec67d9ce893f0 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.6.3-py38h7b17777_0.tar.bz2#8055079ed82e1ada1cc4714c26d04802 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.7.0-py38h7b17777_0.tar.bz2#0b7f0bd8baf6557f140e0f634e90d067 
https://conda.anaconda.org/conda-forge/linux-64/setuptools-49.6.0-py38h578d9bd_3.tar.bz2#59c561cd1be0db9cf1c83f7d7cc74f4d https://conda.anaconda.org/conda-forge/linux-64/shapely-1.7.1-py38haeee4fe_5.tar.bz2#2e633d8e2257f3c0e465c858ce2ddbc6 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 @@ -202,7 +201,7 @@ https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.50.7-hc3c00ef_0.tar.bz https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.4.2-py38hcc49a3a_0.tar.bz2#4bfb6818a1fce6d4129fdf121f788505 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.5.7-nompi_py38h5e9db54_100.tar.bz2#5f86dd7381e37db378068abd7707cd57 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.6.0-pyhd8ed1ab_0.tar.bz2#0941325bf48969e2b3b19d0951740950 -https://conda.anaconda.org/conda-forge/noarch/pip-21.1.2-pyhd8ed1ab_0.tar.bz2#dbd830edaffe5fc9ae6c1d425db2b5f2 +https://conda.anaconda.org/conda-forge/noarch/pip-21.1.3-pyhd8ed1ab_0.tar.bz2#231bd0af116f55ca4d17ea0869415fdf https://conda.anaconda.org/conda-forge/noarch/pygments-2.9.0-pyhd8ed1ab_0.tar.bz2#a2d9bba43c9b80a42b0ccb9afd7223c2 https://conda.anaconda.org/conda-forge/noarch/pyopenssl-20.0.1-pyhd8ed1ab_0.tar.bz2#92371c25994d0f5d28a01c1fb75ebf86 https://conda.anaconda.org/conda-forge/linux-64/pyqt-impl-5.12.3-py38h7400c14_7.tar.bz2#8fe28c949b01e3d69c2b357b5abf3916 diff --git a/requirements/ci/py37.yml b/requirements/ci/py37.yml index fb9f5b38f8..fac21560a4 100644 --- a/requirements/ci/py37.yml +++ b/requirements/ci/py37.yml @@ -8,7 +8,6 @@ dependencies: # Setup dependencies. - setuptools>=40.8.0 - - pyke # Core dependencies. - cartopy>=0.18 diff --git a/requirements/ci/py38.yml b/requirements/ci/py38.yml index ed81a7aaa8..4be43fdba6 100644 --- a/requirements/ci/py38.yml +++ b/requirements/ci/py38.yml @@ -8,7 +8,6 @@ dependencies: # Setup dependencies. - setuptools>=40.8.0 - - pyke # Core dependencies. - cartopy>=0.18 diff --git a/setup.cfg b/setup.cfg index b169303498..e7e96fa5e8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -55,7 +55,6 @@ install_requires = netcdf4 numpy>=1.14 scipy - scitools-pyke xxhash packages = find: package_dir = @@ -123,7 +122,6 @@ exclude = # .eggs, build, - compiled_krb, docs/src/sphinxext/*, tools/*, # diff --git a/setup.py b/setup.py index 6ecc956430..f48f3fe25a 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,6 @@ import os from shutil import copyfile import sys -import textwrap from setuptools import Command, setup from setuptools.command.build_py import build_py @@ -63,50 +62,6 @@ def run(self): os.remove(compiled_path) -def compile_pyke_rules(cmd, directory): - # Call out to the python executable to pre-compile the Pyke rules. - # Significant effort was put in to trying to get these to compile - # within this build process but there was no obvious way of finding - # a workaround to the issue presented in - # https://github.com/SciTools/iris/issues/2481. - - shelled_code = textwrap.dedent( - """\ - - import os - - # Monkey patch the load method to avoid "ModuleNotFoundError: No module - # named 'iris.fileformats._pyke_rules.compiled_krb'". In this instance - # we simply don't want the knowledge engine, so we turn the load method - # into a no-op. - from pyke.target_pkg import target_pkg - target_pkg.load = lambda *args, **kwargs: None - - # Compile the rules by hand, without importing iris. That way we can - # avoid the need for all of iris' dependencies being installed. 
- os.chdir(os.path.join('{bld_dir}', 'iris', 'fileformats', '_pyke_rules')) - - # Import pyke *after* changing directory. Without this we get the compiled - # rules in the wrong place. Identified in - # https://github.com/SciTools/iris/pull/2891#issuecomment-341404187 - from pyke import knowledge_engine - knowledge_engine.engine('') - - """.format( - bld_dir=directory - ) - ).split("\n") - shelled_code = "; ".join( - [ - line - for line in shelled_code - if not line.strip().startswith("#") and line.strip() - ] - ) - args = [sys.executable, "-c", shelled_code] - cmd.spawn(args) - - def copy_copyright(cmd, directory): # Copy the COPYRIGHT information into the package root iris_build_dir = os.path.join(directory, "iris") @@ -154,20 +109,13 @@ def run(self): custom_commands = { "test": SetupTestRunner, - "develop": custom_cmd(develop_cmd, [build_std_names, compile_pyke_rules]), - "build_py": custom_cmd( - build_py, [build_std_names, compile_pyke_rules, copy_copyright] - ), + "develop": custom_cmd(develop_cmd, [build_std_names]), + "build_py": custom_cmd(build_py, [build_std_names, copy_copyright]), "std_names": custom_cmd( BaseCommand, [build_std_names], help_doc="generate CF standard name module", ), - "pyke_rules": custom_cmd( - BaseCommand, - [compile_pyke_rules], - help_doc="compile CF-NetCDF loader rules", - ), "clean_source": CleanSource, }