Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

ccpp_prebuild.py: speed up recompiling unchanged code (again) #357

Merged
merged 1 commit into from
Mar 31, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 19 additions & 14 deletions scripts/ccpp_prebuild.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ def parse_arguments():
clean = args.clean
debug = args.debug
if args.suites:
sdfs = [ 'suite_{0}.xml'.format(x) for x in args.suites.split(',')]
sdfs = ['suite_{0}.xml'.format(x) for x in args.suites.split(',')]
else:
sdfs = None
builddir = args.builddir
Expand Down Expand Up @@ -264,8 +264,8 @@ def gather_variable_definitions(variable_definition_files, typedefs_new_metadata
#
logging.info('Parsing metadata tables for variables provided by host model ...')
success = True
metadata_define = {}
dependencies_define = {}
metadata_define = collections.OrderedDict()
dependencies_define = collections.OrderedDict()
for variable_definition_file in variable_definition_files:
(filedir, filename) = os.path.split(os.path.abspath(variable_definition_file))
# Change to directory of variable_definition_file and parse it
Expand Down Expand Up @@ -304,10 +304,10 @@ def collect_physics_subroutines(scheme_files):
logging.info('Parsing metadata tables in physics scheme files ...')
success = True
# Parse all scheme files: record metadata, argument list, dependencies, and which scheme is in which file
metadata_request = {}
arguments_request = {}
dependencies_request = {}
schemes_in_files = {}
metadata_request = collections.OrderedDict()
arguments_request = collections.OrderedDict()
dependencies_request = collections.OrderedDict()
schemes_in_files = collections.OrderedDict()
for scheme_file in scheme_files:
scheme_file_with_abs_path = os.path.abspath(scheme_file)
(scheme_filepath, scheme_filename) = os.path.split(scheme_file_with_abs_path)
Expand All @@ -331,10 +331,10 @@ def filter_metadata(metadata, arguments, dependencies, schemes_in_files, suites)
also remove information on argument lists, dependencies and schemes in files"""
success = True
# Output: filtered dictionaries
metadata_filtered = {}
arguments_filtered = {}
dependencies_filtered = {}
schemes_in_files_filtered = {}
metadata_filtered = collections.OrderedDict()
arguments_filtered = collections.OrderedDict()
dependencies_filtered = collections.OrderedDict()
schemes_in_files_filtered = collections.OrderedDict()
# Loop through all variables and check if the calling subroutine is in list of subroutines
for var_name in sorted(metadata.keys()):
keep = False
Expand Down Expand Up @@ -421,8 +421,8 @@ def check_optional_arguments(metadata, arguments, optional_arguments):
logging.error('Invalid identifier {0} in container value {1} of requested variable {2}'.format(
subitems[0], var.container, var_name))
if scheme_name_test == scheme_name and subroutine_name_test == subroutine_name and not var.optional in ['t', 'T']:
raise Exception("Variable {} in {} / {}".format(var_name, scheme_name, subroutine_name) + \
" is not an optional argument, but listed as such in the CCPP prebuild config")
raise Exception("Variable {} in {}/{}".format(var_name, scheme_name, subroutine_name) + \
" is not an optional argument, but listed as such in the CCPP prebuild config")

for var_name in sorted(metadata.keys()):
# The notation metadata[var_name][:] is a convenient way to make a copy
Expand Down Expand Up @@ -477,7 +477,7 @@ def compare_metadata(metadata_define, metadata_request):
logging.info('Comparing metadata for requested and provided variables ...')
success = True
modules = []
metadata = {}
metadata = collections.OrderedDict()
for var_name in sorted(metadata_request.keys()):
# Check that variable is provided by the model
if not var_name in metadata_define.keys():
Expand Down Expand Up @@ -635,6 +635,9 @@ def generate_typedefs_makefile(metadata_define, typedefs_makefile, typedefs_cmak
cmakefile.filename = typedefs_cmakefile + '.tmp'
sourcefile = TypedefsSourcefile()
sourcefile.filename = typedefs_sourcefile + '.tmp'
    # Sort typedefs so that the order remains the same (for cmake, to avoid recompiling)
typedefs.sort()
# Generate list of type definitions
makefile.write(typedefs)
cmakefile.write(typedefs)
sourcefile.write(typedefs)
Expand Down Expand Up @@ -705,6 +708,8 @@ def generate_caps_makefile(caps, caps_makefile, caps_cmakefile, caps_sourcefile,
cmakefile.filename = caps_cmakefile + '.tmp'
sourcefile = CapsSourcefile()
sourcefile.filename = caps_sourcefile + '.tmp'
    # Sort caps so that the order remains the same (for cmake, to avoid recompiling)
caps.sort()
# Generate list of caps with absolute path
caps_with_abspath = [ os.path.abspath(os.path.join(caps_dir, cap)) for cap in caps ]
makefile.write(caps_with_abspath)
Expand Down
19 changes: 10 additions & 9 deletions scripts/metadata_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,12 +68,12 @@ def merge_dictionaries(x, y):
that all entries are compatible. If one or more elements exist
in both x and y, we therefore have to test compatibility of
one of the items in each dictionary only."""
z = {}
z = collections.OrderedDict()
x_keys = sorted(x.keys())
y_keys = sorted(y.keys())
z_keys = sorted(list(set(x_keys + y_keys)))
for key in z_keys:
z[key] = {}
z[key] = collections.OrderedDict()
if key in x_keys and key in y_keys:
# Metadata dictionaries containing lists of variables of type Var for each key=standard_name
if isinstance(x[key][0], Var):
Expand Down Expand Up @@ -272,15 +272,15 @@ def parse_variable_tables(filepath, filename):
del file_lines

# Find all modules within the file, and save the start and end lines
module_lines = {}
module_lines = collections.OrderedDict()
line_counter = 0
for line in lines:
words = line.split()
if len(words) > 1 and words[0].lower() in ['module', 'program'] and not words[1].lower() == 'procedure':
module_name = words[1].strip()
if module_name in registry.keys():
raise Exception('Duplicate module name {0}'.format(module_name))
registry[module_name] = {}
registry[module_name] = collections.OrderedDict()
module_lines[module_name] = { 'startline' : line_counter }
elif len(words) > 1 and words[0].lower() == 'end' and words[1].lower() in ['module', 'program']:
try:
Expand Down Expand Up @@ -514,7 +514,6 @@ def parse_scheme_tables(filepath, filename):
metadata = collections.OrderedDict()

# Registry of modules and derived data types in file
#registry = {}
registry = collections.OrderedDict()

# Argument lists of each subroutine in the file
Expand Down Expand Up @@ -549,7 +548,7 @@ def parse_scheme_tables(filepath, filename):
del file_lines

# Find all modules within the file, and save the start and end lines
module_lines = {}
module_lines = collections.OrderedDict()
line_counter = 0
for line in lines:
# For the purpose of identifying module constructs, remove any trailing comments from line
Expand All @@ -560,7 +559,7 @@ def parse_scheme_tables(filepath, filename):
module_name = words[1].strip()
if module_name in registry.keys():
raise Exception('Duplicate module name {0}'.format(module_name))
registry[module_name] = {}
registry[module_name] = collections.OrderedDict()
module_lines[module_name] = { 'startline' : line_counter }
elif len(words) > 1 and words[0].lower() == 'end' and words[1].lower() == 'module':
try:
Expand Down Expand Up @@ -609,7 +608,7 @@ def parse_scheme_tables(filepath, filename):
raise Exception('Scheme name differs from module name: module_name="{0}" vs. scheme_name="{1}"'.format(
module_name, scheme_name))
if not scheme_name in registry[module_name].keys():
registry[module_name][scheme_name] = {}
registry[module_name][scheme_name] = collections.OrderedDict()
if subroutine_name in registry[module_name][scheme_name].keys():
raise Exception('Duplicate subroutine name {0} in module {1}'.format(
subroutine_name, module_name))
Expand Down Expand Up @@ -645,7 +644,7 @@ def parse_scheme_tables(filepath, filename):
for subroutine_name in registry[module_name][scheme_name].keys():
# Record the order of variables in the call list to each subroutine in a list
if not scheme_name in arguments.keys():
arguments[scheme_name] = {}
arguments[scheme_name] = collections.OrderedDict()
if not subroutine_name in arguments[scheme_name].keys():
arguments[scheme_name][subroutine_name] = []
# Find the argument table corresponding to each subroutine by searching
Expand Down Expand Up @@ -725,6 +724,8 @@ def parse_scheme_tables(filepath, filename):
if not var_name in arguments[scheme_name][subroutine_name]:
raise Exception('Mandatory CCPP variable {0} not declared in metadata table of subroutine {1}'.format(
var_name, subroutine_name))
# Sort the dependencies to avoid differences in the auto-generated code
dependencies[scheme_name].sort()

# Debugging output to screen and to XML
if debug and len(metadata.keys()) > 0:
Expand Down
11 changes: 6 additions & 5 deletions scripts/mkstatic.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,7 @@ def create_arguments_module_use_var_defs(variable_dictionary, metadata_define, t

# Add any local variables (required for unit conversions, array transformations, ...)
if tmpvars:
var_defs.append('')
var_defs.append('! Local variables for unit conversions, array transformations, ...')
for tmpvar in tmpvars:
var_defs.append(tmpvar.print_def_local())
Expand Down Expand Up @@ -506,7 +507,7 @@ def __init__(self, **kwargs):
self._caps = None
self._module = None
self._subroutines = None
self._parents = { ccpp_stage : {} for ccpp_stage in CCPP_STAGES.keys() }
self._parents = { ccpp_stage : collections.OrderedDict() for ccpp_stage in CCPP_STAGES.keys() }
self._arguments = { ccpp_stage : [] for ccpp_stage in CCPP_STAGES.keys() }
self._update_cap = True
for key, value in kwargs.items():
Expand Down Expand Up @@ -885,15 +886,15 @@ def __init__(self, **kwargs):
self._module = None
self._subroutines = None
self._pset = None
self._parents = { ccpp_stage : {} for ccpp_stage in CCPP_STAGES }
self._parents = { ccpp_stage : collections.OrderedDict() for ccpp_stage in CCPP_STAGES }
self._arguments = { ccpp_stage : [] for ccpp_stage in CCPP_STAGES }
self._update_cap = True
for key, value in kwargs.items():
setattr(self, "_"+key, value)

def write(self, metadata_request, metadata_define, arguments):
# Create an inverse lookup table of local variable names defined (by the host model) and standard names
standard_name_by_local_name_define = {}
standard_name_by_local_name_define = collections.OrderedDict()
for standard_name in metadata_define.keys():
standard_name_by_local_name_define[metadata_define[standard_name][0].local_name] = standard_name

Expand All @@ -915,10 +916,10 @@ def write(self, metadata_request, metadata_define, arguments):
elif self._finalize and not ccpp_stage == 'finalize':
continue
# For mapping local variable names to standard names
local_vars = {}
local_vars = collections.OrderedDict()
# For mapping temporary variable names (for unit conversions, etc) to local variable names
tmpvar_cnt = 0
tmpvars = {}
tmpvars = collections.OrderedDict()
#
body = ''
# Variable definitions automatically added for subroutines
Expand Down