Timelog #632

Closed
wants to merge 2 commits

Empty file added src/test.txt
Empty file.
128 changes: 105 additions & 23 deletions src/troute_model.py
@@ -2,10 +2,11 @@
import pandas as pd
from pathlib import Path #TODO: remove?
import yaml

import time
import nwm_routing.__main__ as tr



class troute_model():

def __init__(self, bmi_cfg_file):
@@ -27,7 +28,9 @@ def __init__(self, bmi_cfg_file):
'_segment_attributes', '_waterbody_attributes', '_network',
'_data_assimilation', '_fvd', '_lakeout']


(
self._log_parameters,
self._preprocessing_parameters,
self._supernetwork_parameters,
self._waterbody_parameters,
@@ -38,7 +41,6 @@ def __init__(self, bmi_cfg_file):
self._output_parameters,
self._parity_parameters,
self._data_assimilation_parameters,
self._bmi_parameters,
) = _read_config_file(bmi_cfg_file)

self._run_parameters = {
@@ -56,6 +58,18 @@ def __init__(self, bmi_cfg_file):
self._waterbody_attributes = ['waterbody_id','waterbody_toid','LkArea','LkMxE','OrificeA',
'OrificeC','OrificeE','WeirC','WeirE','WeirL','ifd',
'reservoir_type']

self.showtiming = self._log_parameters.get("showtiming", None)
if self.showtiming:
self.task_times = { # dictionary to record time for each section #
'network_time': 0,
'forcing_time': 0,
'output_time': 0,
'run_time': 0,
'data_assimilation_time': 0,
}
# self.main_start_time = time.time()
Contributor
Can you delete this line rather than commenting out?
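
For reference, a minimal sketch of what the requested cleanup could look like, keeping the timing dictionary from the diff above and simply dropping the commented-out main_start_time line (all names come from the PR itself):

self.showtiming = self._log_parameters.get("showtiming", None)
if self.showtiming:
    self.task_times = {
        'network_time': 0,
        'forcing_time': 0,
        'output_time': 0,
        'run_time': 0,
        'data_assimilation_time': 0,
    }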


def preprocess_static_vars(self, values: dict):
"""
@@ -68,6 +82,9 @@ def preprocess_static_vars(self, values: dict):
Returns
-------
"""
if self.showtiming:
network_start_time = time.time() # start of recording time for network creation #

self._network = tr.HYFeaturesNetwork(
self._supernetwork_parameters,
waterbody_parameters=self._waterbody_parameters,
@@ -77,8 +94,23 @@ def preprocess_static_vars(self, values: dict):
compute_parameters=self._compute_parameters,
hybrid_parameters=self._hybrid_parameters,
from_files=False, value_dict=values,
bmi_parameters=self._bmi_parameters,)
segment_attributes=self._segment_attributes,
waterbody_attributes=self._waterbody_attributes)
Comment on lines +97 to +98
Contributor
It looks like your branch has not been rebased with recent changes to the master branch. These two lines are attempting to undo an edit in PR#622. Can you rebase this branch locally?


if len(values['upstream_id'])>0:
for key in values['upstream_id']:
del self._network._connections[key]
del self._network._reverse_network[key]
for tw in self._network._independent_networks.keys():
del self._network._independent_networks[tw][key]
for rli, _ in enumerate(self._network._reaches_by_tw[tw]):
self._network._reaches_by_tw[tw][rli].remove(key)

if self.showtiming:
network_end_time = time.time() # end of network-creation timing; also the start point for data assimilation timing #
self.task_times['network_time'] += network_end_time - network_start_time


# Create data assimilation object with IDs but no dynamic variables yet.
# Dynamic variables will be assigned during 'run' function.
self._data_assimilation = tr.DataAssimilation(
@@ -90,17 +122,10 @@ def preprocess_static_vars(self, values: dict):
value_dict=values,
)

if len(values['upstream_id'])>0:
for key in values['upstream_id']:
del self._network._connections[key]
del self._network._reverse_network[key]
for tw in self._network._independent_networks.keys():
del self._network._independent_networks[tw][key]
for rli, _ in enumerate(self._network._reaches_by_tw[tw]):
self._network._reaches_by_tw[tw][rli].remove(key)



if self.showtiming:
DA_end_time = time.time() # end of data assimilation timing #
self.task_times['data_assimilation_time'] += DA_end_time - network_end_time

def run(self, values: dict, until=300):
"""
Run this model into the future, updating the state stored in the provided model dict appropriately.
@@ -117,26 +142,38 @@ def run(self, values: dict, until=300):
"""
# Set input data into t-route objects
# Forcing values:
if self.showtiming:
forcing_start_time = time.time() # start of recording time for forcing values #

self._network._qlateral = pd.DataFrame(index=self._network.segment_index).join(
pd.DataFrame(values['land_surface_water_source__volume_flow_rate'],
index=values['land_surface_water_source__id'])
index=values['segment_id'])
Contributor
This too is attempting to revert changes from PR#622.

)
self._network._coastal_boundary_depth_df = pd.DataFrame(values['coastal_boundary__depth'])
if len(values['upstream_id'])>0:
flowveldepth_interorder = {values['upstream_id'][0]:{"results": values['upstream_fvd']}}
else:
flowveldepth_interorder = {}

# Trim the time-extent of the streamflow_da usgs_df
# what happens if there are timeslice files missing on the front-end?
# if the first column is some timestamp greater than t0, then this will throw
# an error. Need to think through this more.
if not self._data_assimilation.usgs_df.empty:
self._data_assimilation._usgs_df = self._data_assimilation.usgs_df.loc[:,self._network.t0:]
Comment on lines -130 to -135
Contributor
Same thing here, reverting changes from PR#622
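
The removed block above trimmed the data assimilation usgs_df so that it only covers observation times at or after the network's t0. A minimal standalone sketch of that trim pattern, assuming the usgs_df columns form a sorted DatetimeIndex; the helper name is illustrative, not part of the PR:

import pandas as pd

def trim_usgs_to_t0(usgs_df: pd.DataFrame, t0) -> pd.DataFrame:
    # Keep only observation columns at or after t0; empty frames pass through unchanged.
    if usgs_df.empty:
        return usgs_df
    return usgs_df.loc[:, t0:]

The concern raised in the removed comment (timeslice files missing on the front end) would still need to be handled by the caller.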

if self.showtiming:
forcing_end_time = time.time() # end of forcing timing; also the start point for data assimilation timing #
self.task_times['forcing_time'] += forcing_end_time - forcing_start_time

# Data Assimilation values:

self._data_assimilation._usgs_df = pd.DataFrame(values['usgs_gage_observation__volume_flow_rate'])
self._data_assimilation._last_obs_df = pd.DataFrame(values['lastobs__volume_flow_rate'])
self._data_assimilation._reservoir_usgs_df = pd.DataFrame(values['reservoir_usgs_gage_observation__volume_flow_rate'])
self._data_assimilation._reservoir_usace_df = pd.DataFrame(values['reservoir_usace_gage_observation__volume_flow_rate'])

if self.showtiming:
DA_end_time = time.time() # end of data assimilation timing; also the start point for routing timing #
self.task_times['data_assimilation_time'] += DA_end_time - forcing_end_time

# Adjust number of steps based on user input
nts = int(until/self._time_step)


# Run routing
(
self._run_results,
@@ -201,11 +238,16 @@ def run(self, values: dict, until=300):

self._network.update_waterbody_water_elevation()


# update t0
self._network.new_t0(self._time_step, nts)

# get reservoir DA initial parameters for next loop iteration
self._data_assimilation.update_after_compute(self._run_results, self._time_step*nts)
Contributor
Here again, PR#622

self._data_assimilation.update_after_compute(self._run_results)

if self.showtiming:
run_end_time = time.time() # end of routing timing; also the start point for output timing #
self.task_times['run_time'] += run_end_time - DA_end_time

# Create output flowveldepth and lakeout arrays
self._fvd, self._lakeout = _create_output_dataframes(
@@ -231,6 +273,28 @@ def run(self, values: dict, until=300):

# update model time
self._time += self._time_step * nts
if self.showtiming:
output_end_time = time.time() # end of recording time for creating output #
self.task_times['output_time'] += output_end_time - run_end_time
# self.task_times['total_time'] = time.time() - self.main_start_time

def finalize(self,):
if self.showtiming:
print('***************** TIMING SUMMARY *****************')
print('----------------------------------------')
total_time = (round(self.task_times['network_time'], 2) +
round(self.task_times['data_assimilation_time'], 2) +
round(self.task_times['forcing_time'], 2) +
round(self.task_times['run_time'], 2) +
round(self.task_times['output_time'], 2)
)
for key in self.task_times:
time_value = self.task_times[key]
percentage = (time_value / total_time) * 100
print(
f'{key} construction: {time_value:.2f} secs, {percentage:.2f} %'
)
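
As a reference point, a minimal standalone sketch of the same timing-summary pattern, summing the raw (unrounded) task times and guarding against a zero total; the function name and the example values are illustrative, not part of the PR:

def print_timing_summary(task_times: dict) -> None:
    # Report each task's elapsed time and its share of the total runtime.
    total_time = sum(task_times.values())
    print('***************** TIMING SUMMARY *****************')
    print('----------------------------------------')
    for key, seconds in task_times.items():
        percentage = (seconds / total_time) * 100 if total_time > 0 else 0.0
        print(f'{key}: {seconds:.2f} secs, {percentage:.2f} %')

# Example (hypothetical values):
# print_timing_summary({'network_time': 1.2, 'forcing_time': 0.3, 'output_time': 0.1,
#                       'run_time': 4.5, 'data_assimilation_time': 0.6})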



# Utility functions -------
@@ -260,6 +324,7 @@ def _read_config_file(custom_input_file): #TODO: Update this function, I dont' t
with open(custom_input_file) as custom_file:
data = yaml.load(custom_file, Loader=yaml.SafeLoader)

log_parameters = data.get("log_parameters", {})
network_topology_parameters = data.get("network_topology_parameters", None)
supernetwork_parameters = network_topology_parameters.get(
"supernetwork_parameters", None
@@ -269,6 +334,24 @@ def _read_config_file(custom_input_file): #TODO: Update this function, I dont' t
supernetwork_parameters["title_string"] = "HY_Features Test"
supernetwork_parameters["geo_file_path"] = supernetwork_parameters['geo_file_path']
supernetwork_parameters["flowpath_edge_list"] = None
routelink_attr = {
#link????
"key": "id",
"downstream": "toid",
"dx": "length_m",
"n": "n", # TODO: rename to `manningn`
"ncc": "nCC", # TODO: rename to `mannningncc`
"s0": "So",
"bw": "BtmWdth", # TODO: rename to `bottomwidth`
#waterbody: "NHDWaterbodyComID",
"tw": "TopWdth", # TODO: rename to `topwidth`
"twcc": "TopWdthCC", # TODO: rename to `topwidthcc`
"alt": "alt",
"musk": "MusK",
"musx": "MusX",
"cs": "ChSlp" # TODO: rename to `sideslope`
}
supernetwork_parameters["columns"] = routelink_attr
Comment on lines +337 to +354
Contributor
Also here, PR#622

supernetwork_parameters["waterbody_null_code"] = -9999
supernetwork_parameters["terminal_code"] = 0
supernetwork_parameters["driver_string"] = "NetCDF"
@@ -289,9 +372,9 @@ def _read_config_file(custom_input_file): #TODO: Update this function, I dont' t
)
output_parameters = data.get("output_parameters", {})
parity_parameters = output_parameters.get("wrf_hydro_parity_check", {})
bmi_parameters = data.get("bmi_parameters", {})

return (
log_parameters,
preprocessing_parameters,
supernetwork_parameters,
waterbody_parameters,
@@ -302,7 +385,6 @@ def _read_config_file(custom_input_file): #TODO: Update this function, I dont' t
output_parameters,
parity_parameters,
data_assimilation_parameters,
bmi_parameters,
)

def _retrieve_last_output(results, nts, waterbodies_df,):