TF
Import data
1. Dataset & iterator
tf.data.Dataset
= a sequence of elements
tf.data.Iterator
= extract elements from a dataset
2. Load data
# from_tensor_slices slices the outermost dimension: a [4, 10] tensor
# becomes a dataset of 4 elements, each of shape (10,).
dataset1 = tf.data.Dataset.from_tensor_slices(tf.random_uniform([4, 10]))
print(dataset1.output_types) # ==> "tf.float32"
print(dataset1.output_shapes) # ==> "(10,)"
# Passing a tuple of tensors yields a dataset of tuples; both tensors must
# share the same outermost dimension (4 here), which becomes the element count.
dataset2 = tf.data.Dataset.from_tensor_slices(
(
tf.random_uniform([4]),
tf.random_uniform([4, 100], maxval=100, dtype=tf.int32)
)
)
print(dataset2.output_types) # ==> "(tf.float32, tf.int32)"
print(dataset2.output_shapes) # ==> "((), (100,))"
3. Transformation
# Input pipeline: shuffle -> parse -> augment -> batch -> prefetch.
# NOTE(review): assumes `filenames`, `labels`, `num_samples`, `parse_fn`,
# `train_fn`, and `params` are defined by the surrounding script — confirm.
dataset = (tf.data.Dataset.from_tensor_slices((tf.constant(filenames), tf.constant(labels)))
           .shuffle(num_samples)  # whole dataset into the buffer ensures good shuffling
           .map(parse_fn, num_parallel_calls=params.num_parallel_calls)
           .map(train_fn, num_parallel_calls=params.num_parallel_calls)
           .batch(params.batch_size)
           .prefetch(1))  # make sure you always have one batch ready to serve
# Fixed: the original snippet never closed the '(' opened on the first line.
4. Iterator
# One-shot iterator: traverses the dataset exactly once and needs no
# explicit initialization (restored the loop indentation the page lost).
dataset = tf.data.Dataset.range(100)
iterator = dataset.make_one_shot_iterator()
next_element = iterator.get_next()
for i in range(100):
    # NOTE(review): assumes a tf.Session `sess` exists in scope — confirm.
    value = sess.run(next_element)
    assert i == value
# Initializable iterator: must be explicitly initialized before the first
# get_next(), but can be re-initialized to re-read the dataset.
dataset = tf.data.Dataset.range(100)
iterator = dataset.make_initializable_iterator()
next_element = iterator.get_next()
# Initialize the iterator over the 100-element dataset.
# (Original comment said "10 elements"; the dataset is range(100).)
sess.run(iterator.initializer)
for i in range(100):
    value = sess.run(next_element)
    assert i == value
Eager Execution
from __future__ import absolute_import, division, print_function
import tensorflow as tf
# TF 1.x: must be called at program startup, before any graph ops are built.
tf.enable_eager_execution()
# Returns True once eager mode is active for the current thread.
tf.executing_eagerly()
# ==> True
Some functions
get_global_step
# Returns the graph's global-step tensor if one has been created, else None.
global_step = tf.train.get_global_step()
tf.train.Saver
# Default max_to_keep=5: retains the 5 most recent *checkpoints*
# (one checkpoint per save() call, not per epoch).
last_saver = tf.train.Saver()
with tf.Session() as sess:
    ...
    # Restore variables from a previously written checkpoint path.
    last_saver.restore(sess, restore_from)
    ...
    # global_step is appended to the checkpoint filename (e.g. "path-3").
    last_saver.save(sess, last_save_path, global_step=epoch + 1)
# max_to_keep=1: only the single most recent (i.e. best) checkpoint survives;
# older ones are deleted automatically on each save.
best_saver = tf.train.Saver(max_to_keep=1)
with tf.Session() as sess:
    ...
    # save() returns the path the checkpoint was actually written to.
    best_save_path = best_saver.save(sess, best_save_path, global_step=epoch + 1)
Output
to screen
# Train for 11 steps and print the loss every 5th step
# (restored the block indentation the page lost).
# NOTE(review): assumes `init`, `train_op`, `loss_op`, and `feed_dict` are
# defined elsewhere in the example — confirm against the full script.
with tf.Session() as sess:
    sess.run(init)
    for step in range(11):
        sess.run(train_op, feed_dict=feed_dict)
        if step % 5 == 0:
            loss = sess.run(loss_op, feed_dict=feed_dict)
            print(loss)
tensorboard
# TensorBoard logging: attach summaries to tensors, merge them into one op,
# and write the evaluated results (plus the graph) to an event file.
with tf.name_scope('Layer'):
    ...
    outputs = activation_function(formula)
    # Histogram summary: tracks the distribution of `outputs` over training.
    tf.summary.histogram(name=filename, values=outputs)
with tf.name_scope('Loss'):
    ...
    loss = tf.reduce_mean(tf.square(y_feeds - output_layer))
    # Scalar summary: one value per step, rendered as a curve.
    tf.summary.scalar('loss', loss)
merged = tf.summary.merge_all()  # single op that evaluates every summary above
with tf.Session() as sess:
    sess.run(init)
    writer = tf.summary.FileWriter("TensorBoard/", graph=sess.graph)  # add graph
    for step in range(11):
        sess.run(train, feed_dict=feed_dict)
        if step % 5 == 0:
            result = sess.run(merged, feed_dict=feed_dict)
            writer.add_summary(result, step)  # add histogram & scalar
Last updated
Was this helpful?