aboutsummaryrefslogtreecommitdiff
path: root/client.py
diff options
context:
space:
mode:
Diffstat (limited to 'client.py')
-rw-r--r--client.py33
1 file changed, 33 insertions, 0 deletions
diff --git a/client.py b/client.py
new file mode 100644
index 0000000..7b14701
--- /dev/null
+++ b/client.py
@@ -0,0 +1,33 @@
import os
# Silence TensorFlow's C++ logging (3 = errors only). The variable is read
# at import time, so it must be set BEFORE tensorflow is imported.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

import tensorflow as tf

# Load the trained character-level model for inference only:
# compile=False skips restoring the optimizer/loss state we don't need.
model = tf.keras.models.load_model('model.h5', compile=False)

# Vocabulary of the character model (65 symbols), index -> character.
idx2char = ['\n', ' ', '!', '$', '&', "'", ',', '-', '.', '3', ':', ';', '?', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z']
# Derive the reverse mapping (character -> index) instead of maintaining a
# second hand-written literal that could drift out of sync.
char2idx = {ch: i for i, ch in enumerate(idx2char)}
+
def generate_text(model, start_string=u'ROMEO:', num_generate=1000, temperature=0.7):
    """Sample text from a stateful character-level RNN model.

    Args:
        model: Keras model mapping a batch of character-id sequences to
            next-character logits; assumed stateful (has reset_states).
        start_string: seed text used to prime the model's hidden state.
        num_generate: number of characters to sample after the seed.
        temperature: logit divisor before sampling; lower values yield
            more predictable text, higher values more surprising text.

    Returns:
        start_string followed by num_generate generated characters.
    """
    # NOTE: the original body reassigned num_generate and temperature to
    # hard-coded constants, silently ignoring the caller's arguments.
    # Those reassignments (and a debug print in the loop) are removed.

    # Encode the seed as a batch of one id sequence: shape (1, len(seed)).
    input_eval = tf.expand_dims([char2idx[s] for s in start_string], 0)

    text_generated = []

    # Clear any hidden state left over from a previous call.
    model.reset_states()
    for _ in range(num_generate):
        predictions = model(input_eval)
        # Drop the batch dimension: (1, seq, vocab) -> (seq, vocab).
        predictions = tf.squeeze(predictions, 0)
        # Temperature-scale the logits before sampling.
        predictions = predictions / temperature
        # Sample one id from the distribution of the last time step.
        predicted_id = tf.random.categorical(predictions, num_samples=1)[-1, 0].numpy()
        # Feed the sampled character back as the next input (state is
        # carried inside the stateful model between calls).
        input_eval = tf.expand_dims([predicted_id], 0)
        text_generated.append(idx2char[predicted_id])

    return start_string + ''.join(text_generated)
+
# Only generate when run as a script, so importing this module for its
# model/vocabulary does not trigger a 1000-character sampling run.
if __name__ == '__main__':
    print(generate_text(model))