
Commit 767e0b1

Address review comments
reuben committed Mar 20, 2019
1 parent a6bbe8b
Showing 2 changed files with 9 additions and 15 deletions.
DeepSpeech.py: 10 changes (3 additions & 7 deletions)
@@ -23,7 +23,6 @@
from util.feeding import create_dataset, samples_to_mfccs, audiofile_to_features
from util.flags import create_flags, FLAGS
from util.logging import log_info, log_error, log_debug, log_warn
-from util.text import Alphabet


# Graph Creation
@@ -382,6 +381,7 @@ def train(server=None):
# It will automagically get incremented by the optimizer.
global_step = tf.Variable(0, trainable=False, name='global_step')

+# One rate per layer
dropout_rates = [tf.placeholder(tf.float32, name='dropout_{}'.format(i)) for i in range(6)]

# Create training and validation datasets
@@ -463,12 +463,7 @@ def end(self, session):
hooks.append(tf.train.CheckpointSaverHook(checkpoint_dir=FLAGS.checkpoint_dir, save_secs=FLAGS.checkpoint_secs, saver=saver))

no_dropout_feed_dict = {
-dropout_rates[0]: 0.,
-dropout_rates[1]: 0.,
-dropout_rates[2]: 0.,
-dropout_rates[3]: 0.,
-dropout_rates[4]: 0.,
-dropout_rates[5]: 0.,
+rate: 0. for rate in dropout_rates
}

# Progress Bar
@@ -667,6 +662,7 @@ def create_inference_graph(batch_size=1, n_steps=16, tflite=False):

previous_state = tf.contrib.rnn.LSTMStateTuple(previous_state_c, previous_state_h)

+# One rate per layer
no_dropout = [None] * 6

if tflite:
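For readers skimming the DeepSpeech.py hunks above: the feed-dict change collapses six identical per-layer entries into a dict comprehension over the placeholder list. The sketch below only restates what the diff shows, assuming the TF 1.x API used throughout this file.

import tensorflow as tf

# One dropout-rate placeholder per layer, exactly as in the train() hunk above.
dropout_rates = [tf.placeholder(tf.float32, name='dropout_{}'.format(i))
                 for i in range(6)]

# Before this commit: one explicit entry per layer.
no_dropout_feed_dict = {
    dropout_rates[0]: 0.,
    dropout_rates[1]: 0.,
    dropout_rates[2]: 0.,
    dropout_rates[3]: 0.,
    dropout_rates[4]: 0.,
    dropout_rates[5]: 0.,
}

# After this commit: the same mapping built with a dict comprehension.
no_dropout_feed_dict = {rate: 0. for rate in dropout_rates}

Both forms produce an identical feed dict; the comprehension simply stays correct if the number of layers ever changes.
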
evaluate.py: 14 changes (6 additions & 8 deletions)
@@ -5,27 +5,24 @@
import itertools
import json
import numpy as np
import os
import pandas
import progressbar
import sys
import tensorflow as tf

from ds_ctcdecoder import ctc_beam_search_decoder_batch, Scorer
-from multiprocessing import Pool, cpu_count
+from multiprocessing import cpu_count
from six.moves import zip, range
from util.config import Config, initialize_globals
+from util.evaluate_tools import calculate_report
+from util.feeding import create_dataset
from util.flags import create_flags, FLAGS
from util.logging import log_error
-from util.feeding import create_dataset
-from util.text import Alphabet, levenshtein, text_to_char_array
-from util.evaluate_tools import process_decode_result, calculate_report
+from util.text import levenshtein


def sparse_tensor_value_to_texts(value, alphabet):
r"""
Given a :class:`tf.SparseTensor` ``value``, return an array of Python strings
-representing its values.
+representing its values, converting tokens to strings using ``alphabet``.
"""
return sparse_tuple_to_texts((value.indices, value.values, value.dense_shape), alphabet)

@@ -53,6 +50,7 @@ def evaluate(test_csvs, create_model):

(batch_x, batch_x_len), batch_y = it.get_next()

+# One rate per layer
no_dropout = [None] * 6
logits, _ = create_model(batch_x=batch_x,
seq_length=batch_x_len,
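Side note on the docstring fix above: sparse_tensor_value_to_texts unpacks a tf.SparseTensor value into its (indices, values, dense_shape) triple and maps each integer label back to a character through the alphabet, yielding one transcript per batch row. The sketch below is only an illustration of that idea; the function name and the label_to_char callback are stand-ins, not the repo's actual helpers (those live in util/text.py).

import numpy as np

def sparse_value_to_texts(indices, values, dense_shape, label_to_char):
    # One output string per batch row.
    texts = [''] * int(dense_shape[0])
    # Each sparse entry is ((row, position), label); entries are assumed to be
    # ordered by position within each row, as TensorFlow emits them.
    for (row, _pos), label in zip(np.asarray(indices), np.asarray(values)):
        texts[int(row)] += label_to_char(int(label))
    return texts

Passing any int-to-str mapping, e.g. label_to_char=lambda i: 'abc'[i], is enough to exercise the sketch.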
