From 9727bf8cc7d41dd44dd40ac30ff3c11d01ae1226 Mon Sep 17 00:00:00 2001 From: Valery Chernov Date: Fri, 20 Aug 2021 18:44:00 +0300 Subject: [PATCH] small fixes in comments --- python/tvm/relay/frontend/pytorch.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/python/tvm/relay/frontend/pytorch.py b/python/tvm/relay/frontend/pytorch.py index f616c3bf70376..613643f091d79 100644 --- a/python/tvm/relay/frontend/pytorch.py +++ b/python/tvm/relay/frontend/pytorch.py @@ -2345,7 +2345,7 @@ def bidir_gru_cell( def gru_layers(self, input_data, layer_weights_dicts, bidirectional, dropout_p=0.0): """ - Methods iterates layers for Stacked LSTM + Method iterates layers for Stacked GRU """ layers_num = len(layer_weights_dicts) # split input sequence to samples set @@ -2368,7 +2368,7 @@ def gru_layers(self, input_data, layer_weights_dicts, bidirectional, dropout_p=0 if dropout_p != 0 and i < layers_num - 1: # for input in input_seqs: # input = _op.dropout(input, dropout_p) - raise NotImplementedError("Dropout for LSTM has not been supported yet!") + raise NotImplementedError("Dropout for GRU has not been supported yet!") return _op.stack(input_seqs, 0), _op.stack(output_hiddens, 0) @@ -2447,7 +2447,7 @@ def gru(self, inputs, input_types): names = ["hidden_state", "w_inp", "w_hid", "b_inp", "b_hid"] if bidirectional: rsd = len(_weights) % (2 * weights_num) - assert rsd == 0, "got an incorrect number of LSTM weights" + assert rsd == 0, "got an incorrect number of GRU weights" for i in range(0, len(_weights), 2 * weights_num): fw_tensors = [layers_h[2 * k], *_weights[i : i + 4]] fw_weights_dict = dict(zip(names, fw_tensors)) @@ -2457,7 +2457,7 @@ def gru(self, inputs, input_types): layer_weights_dicts.append([fw_weights_dict, rev_weights_dict]) k += 1 else: - assert len(_weights) % weights_num == 0, "got an incorrect number of LSTM weights" + assert len(_weights) % weights_num == 0, "got an incorrect number of GRU weights" for i in range(0, 
len(_weights), weights_num): fw_tensors = [layers_h[k], *_weights[i : i + 4]] fw_weights_dict = dict(zip(names, fw_tensors)) @@ -2467,7 +2467,7 @@ def gru(self, inputs, input_types): names = ["hidden_state", "w_inp", "w_hid"] if bidirectional: rsd = len(_weights) % (2 * weights_num) - assert rsd == 0, "got an incorrect number of LSTM weights" + assert rsd == 0, "got an incorrect number of GRU weights" for i in range(0, len(_weights), 2 * weights_num): fw_tensors = [layers_h[2 * k], *_weights[i : i + 2]] fw_weights_dict = dict(zip(names, fw_tensors)) @@ -2477,7 +2477,7 @@ def gru(self, inputs, input_types): layer_weights_dicts.append([fw_weights_dict, rev_weights_dict]) k += 1 else: - assert len(_weights) % weights_num == 0, "got an incorrect number of LSTM weights" + assert len(_weights) % weights_num == 0, "got an incorrect number of GRU weights" for i in range(0, len(_weights), weights_num): fw_tensors = [layers_h[k], *_weights[i : i + 2]] fw_weights_dict = dict(zip(names, fw_tensors))