Commit 6154150

Pass missing dropout rate parameters
reuben committed Apr 5, 2019
1 parent 5ee856d · commit 6154150
Showing 1 changed file with 4 additions and 4 deletions.
DeepSpeech.py: 8 changes (4 additions & 4 deletions)
@@ -128,9 +128,9 @@ def create_model(batch_x, seq_length, dropout, reuse=False, previous_state=None,

     # The next three blocks will pass `batch_x` through three hidden layers with
     # clipped RELU activation and dropout.
-    layers['layer_1'] = layer_1 = dense('layer_1', batch_x, Config.n_hidden_1)
-    layers['layer_2'] = layer_2 = dense('layer_2', layer_1, Config.n_hidden_2)
-    layers['layer_3'] = layer_3 = dense('layer_3', layer_2, Config.n_hidden_3)
+    layers['layer_1'] = layer_1 = dense('layer_1', batch_x, Config.n_hidden_1, dropout_rate=dropout[0])
+    layers['layer_2'] = layer_2 = dense('layer_2', layer_1, Config.n_hidden_2, dropout_rate=dropout[1])
+    layers['layer_3'] = layer_3 = dense('layer_3', layer_2, Config.n_hidden_3, dropout_rate=dropout[2])
 
     # `layer_3` is now reshaped into `[n_steps, batch_size, 2*n_cell_dim]`,
     # as the LSTM RNN expects its input to be of shape `[max_time, batch_size, input_size]`.
@@ -147,7 +147,7 @@ def create_model(batch_x, seq_length, dropout, reuse=False, previous_state=None,
     layers['rnn_output_state'] = output_state
 
     # Now we feed `output` to the fifth hidden layer with clipped RELU activation
-    layers['layer_5'] = layer_5 = dense('layer_5', output, Config.n_hidden_5)
+    layers['layer_5'] = layer_5 = dense('layer_5', output, Config.n_hidden_5, dropout_rate=dropout[5])
 
     # Now we apply a final linear layer creating `n_classes` dimensional vectors, the logits.
     layers['layer_6'] = layer_6 = dense('layer_6', layer_5, Config.n_hidden_6, relu=False)
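
For context: the `dense` helper accepts an optional `dropout_rate` argument, and before this commit the calls for layers 1-3 and 5 omitted it, so those layers ran with no dropout regardless of the configured rates. A minimal sketch of such a helper, assuming a TF 1.x-style implementation (the initializers and the clip constant below are stand-ins, not the real file's values):

import tensorflow as tf

def dense(name, x, units, dropout_rate=None, relu=True):
    # Fully connected layer with optional clipped ReLU and dropout.
    with tf.variable_scope(name):
        weights = tf.get_variable('weights', [int(x.shape[-1]), units])
        bias = tf.get_variable('bias', [units], initializer=tf.zeros_initializer())
    output = tf.nn.bias_add(tf.matmul(x, weights), bias)
    if relu:
        output = tf.minimum(tf.nn.relu(output), 20.0)  # 20.0 stands in for the configured relu_clip
    if dropout_rate is not None:
        # The omission this commit fixes: with no dropout_rate argument passed,
        # this branch never runs and the layer gets no dropout.
        output = tf.nn.dropout(output, rate=dropout_rate)
    return output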
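With the change applied, `dropout[0]`-`dropout[2]` reach the three input layers and `dropout[5]` reaches layer 5, while `layer_6` stays a plain linear layer; the remaining indices are presumably consumed inside the recurrent layer. A hypothetical illustration of wiring the `dropout` argument at a call site (the placeholder shapes and the return signature of `create_model` are assumptions made for this sketch):

import tensorflow as tf

# Six per-layer dropout placeholders: feed real rates while training and
# zeros during validation so dropout becomes a no-op.
dropout_rates = [tf.placeholder(tf.float32, name='dropout_{}'.format(i))
                 for i in range(6)]

batch_x = tf.placeholder(tf.float32, [None, None], name='batch_x')  # shape assumed
seq_length = tf.placeholder(tf.int32, [None], name='seq_length')

logits, layers = create_model(batch_x, seq_length, dropout=dropout_rates)  # returns assumed

train_feed = {rate: 0.05 for rate in dropout_rates}  # e.g. a 5% dropout rate
eval_feed = {rate: 0.0 for rate in dropout_rates}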
