From f1cea02c31f81f3d1fbac5e1b1999f11bc2e17b4 Mon Sep 17 00:00:00 2001
From: Saranya Venkatraman
Date: Mon, 17 Feb 2020 19:19:48 -0500
Subject: [PATCH] batching conditional generation

---
 src/interactive_conditional_samples.py | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/src/interactive_conditional_samples.py b/src/interactive_conditional_samples.py
index 8b66000d7..092a56fad 100755
--- a/src/interactive_conditional_samples.py
+++ b/src/interactive_conditional_samples.py
@@ -68,12 +68,14 @@ def interact_model(
         saver = tf.train.Saver()
         ckpt = tf.train.latest_checkpoint(os.path.join(models_dir, model_name))
         saver.restore(sess, ckpt)
-
-        while True:
-            raw_text = input("Model prompt >>> ")
-            while not raw_text:
-                print('Prompt should not be empty!')
-                raw_text = input("Model prompt >>> ")
+        prompts = ["This is the first prompt", "This is the second", "And you can read prefixes from a file too"]
+        for prompt in prompts:
+            raw_text = prompt
+            #raw_text = input("Model prompt >>> ")
+            #while not raw_text:
+                #print('Prompt should not be empty!')
+                #raw_text = input("Model prompt >>> ")
+
             context_tokens = enc.encode(raw_text)
             generated = 0
             for _ in range(nsamples // batch_size):
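
Note: the last entry in the hardcoded prompts list says prefixes can also be
read from a file. A minimal sketch of that variant, assuming a
newline-delimited text file; the filename "prompts.txt" is hypothetical and
not part of this patch:

    # Read one prompt per line from a plain-text file, skipping blank lines.
    # "prompts.txt" is an assumed filename; any newline-delimited file works.
    with open("prompts.txt") as f:
        prompts = [line.strip() for line in f if line.strip()]

The resulting list drops into the patched loop unchanged: each non-empty line
becomes one raw_text, and everything from enc.encode(raw_text) onward runs
exactly as in the diff above.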