Commit

Minor logging change
batzner committed Aug 25, 2017
1 parent 487c2fb commit 4ff864a
Showing 3 changed files with 7 additions and 6 deletions.
8 changes: 4 additions & 4 deletions aws-clipboard.txt
@@ -1,3 +1,5 @@
ssh -i ~/dev/pems/tensorflow.pem ubuntu@54.149.27.213

git clone https://github.com/batzner/tensorlm
cd tensorlm
mkdir datasets
@@ -11,8 +13,6 @@ scp -i ~/dev/pems/tensorflow.pem ~/dev/python-hacks/tensorlm/datasets/sherlock/v

# Train
screen
python3 -m tensorlm.run --train=True --level=word --max_vocab_size=10000 --neurons_per_layer=250 --num_layers=3 --max_batch_size=100 --num_timesteps=160 --save_dir=out/model --evaluate_text_path=datasets/sherlock/valid.txt --train_text_path=datasets/sherlock/train.txt --max_epochs=30 --save_interval_hours=1

# ctrl-a d
python3 -m tensorlm.run --train=True --level=word --max_vocab_size=10000 --neurons_per_layer=250 --num_layers=3 --max_batch_size=100 --num_timesteps=160 --save_dir=out/model --evaluate_text_path=datasets/sherlock/valid.txt --train_text_path=datasets/sherlock/train.txt --max_epochs=300 --save_interval_hours=1

python3 -m tensorlm.run --train=True --log_interval=1 --level=word --max_vocab_size=1000 --neurons_per_layer=250 --num_layers=3 --max_batch_size=100 --num_timesteps=160 --save_dir=out/model-small --evaluate_text_path=datasets/sherlock/valid.txt --train_text_path=datasets/sherlock/train.txt --max_epochs=30 --save_interval_hours=1
# ctrl-a d
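Note on the three training invocations above: the first two differ only in --max_epochs (30 vs. 300), while the third trains a smaller model (--max_vocab_size=1000, --save_dir=out/model-small) and sets --log_interval=1 for frequent log output. Purely for orientation, here is a minimal argparse sketch of such a flag surface; the flag names are taken from the commands above, but the defaults and parsing style are assumptions, not tensorlm.run's actual implementation:

    import argparse

    def str2bool(value):
        # argparse's type=bool would treat any non-empty string (even
        # "False") as True, so parse the literal "True"/"False" explicitly.
        return value.lower() in ("true", "1", "yes")

    # Hypothetical parser mirroring the flags used above; defaults are guesses.
    parser = argparse.ArgumentParser(prog="tensorlm.run")
    parser.add_argument("--train", type=str2bool, default=False)
    parser.add_argument("--level", choices=["char", "word"], default="char")
    parser.add_argument("--max_vocab_size", type=int, default=10000)
    parser.add_argument("--neurons_per_layer", type=int, default=250)
    parser.add_argument("--num_layers", type=int, default=3)
    parser.add_argument("--max_batch_size", type=int, default=100)
    parser.add_argument("--num_timesteps", type=int, default=160)
    parser.add_argument("--save_dir", default="out/model")
    parser.add_argument("--evaluate_text_path")
    parser.add_argument("--train_text_path")
    parser.add_argument("--max_epochs", type=int, default=30)
    parser.add_argument("--log_interval", type=int, default=100)
    parser.add_argument("--save_interval_hours", type=float, default=1.0)

    args = parser.parse_args()
    print(args)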
4 changes: 3 additions & 1 deletion tensorlm/dataset.py
@@ -79,9 +79,11 @@ def _update_batches(self, tokens):

        # Each batch is a tuple with inputs and targets
        self.index_to_batch[self.next_batch_index_to_load] = (batch_input_ids, batch_target_ids)

        self.next_batch_index_to_load += 1

        LOGGER.debug("Loaded batches %d to %d", min(self.index_to_batch.keys()),
                     max(self.index_to_batch.keys()))

    def _split_tokens_in_batches(self, tokens):
        # Start the rows of batches at equidistant points in the tokens

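An aside on the new debug line: it uses the logging module's lazy %-style formatting, passing min and max as arguments so the string is only built when DEBUG logging is actually enabled. A minimal standalone sketch of the pattern, assuming the usual module-level logger that the LOGGER name suggests (the real dataset.py may configure logging differently):

    import logging

    logging.basicConfig(level=logging.DEBUG)
    LOGGER = logging.getLogger(__name__)

    # Stand-in for self.index_to_batch: batch index -> (inputs, targets)
    index_to_batch = {13: ([1, 2], [2, 3]), 14: ([3, 4], [4, 5])}

    # Lazy formatting: the message is only rendered if DEBUG is enabled,
    # so the call stays cheap at higher log levels.
    LOGGER.debug("Loaded batches %d to %d", min(index_to_batch.keys()),
                 max(index_to_batch.keys()))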
1 change: 0 additions & 1 deletion tensorlm/wrappers.py
@@ -56,7 +56,6 @@ def train(self, tf_session, max_epochs=10, max_steps=None, batch_size=None, text
        train_set = Dataset(text_path, self.vocab, batch_size,
                            self.num_timesteps)

        LOGGER.info()
        while (train_state.epoch <= max_epochs and
               (not max_steps or train_state.global_step <= max_steps)):

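The single deletion in wrappers.py is the bare LOGGER.info() call shown above. The commit message doesn't say why it was removed, but in the standard library's logging module, msg is a required positional argument, so Logger.info() with no arguments raises a TypeError at runtime instead of logging anything. A standalone demonstration (not repo code):

    import logging

    LOGGER = logging.getLogger(__name__)

    try:
        LOGGER.info()  # logging.Logger.info requires a msg argument
    except TypeError as err:
        print(err)  # e.g. "info() missing 1 required positional argument: 'msg'"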
