Trick_2
Save & restore
# save
import tensorflow as tf

X = tf.placeholder('float', [None, 4000], name='input_X')
Y = tf.placeholder('float', [None, 2], name='label_Y')
is_training = tf.placeholder(tf.bool, name='is_training')
logits = build_model(is_training, X)
# Wrap the output in tf.identity so it gets a stable name that can be
# looked up by name after the model is restored.
tf.identity(logits, name='output_Y')
# ...
tf.saved_model.simple_save(
    sess,
    save_dir.as_posix(),
    inputs={
        'input_X': X,
        'is_training': is_training
    },
    outputs={'output_Y': logits}
)
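simple_save writes the model to save_dir as a SavedModel tagged 'serve' (tf.saved_model.tag_constants.SERVING), with the inputs/outputs dicts registered under the default 'serving_default' signature. A minimal sanity-check sketch, reusing save_dir from above (check_sess and meta_graph are hypothetical names):

# loader.load returns the MetaGraphDef; its signature_def map holds the
# tensor names that simple_save registered for serving.
with tf.Session(graph=tf.Graph()) as check_sess:
    meta_graph = tf.saved_model.loader.load(check_sess, ['serve'], save_dir.as_posix())
    print(meta_graph.signature_def['serving_default'])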
# restore
with tf.Session() as sess:
    # Load the SavedModel exported above; 'serve' is the tag written by simple_save.
    tf.saved_model.loader.load(
        sess,
        ['serve'],
        save_dir.as_posix()
    )
    # Recover the named tensors from the restored graph.
    X = sess.graph.get_tensor_by_name('input_X:0')
    Y = sess.graph.get_tensor_by_name('output_Y:0')
    is_training = sess.graph.get_tensor_by_name('is_training:0')
    valid_pos_predictions = sess.run(
        Y,
        feed_dict={
            X: valid_pos[:, 0:4000],
            is_training: False
        }
    )
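The restored output_Y tensor returns raw logits with one column per class. A short hypothetical follow-up, assuming the column index is the class label, that turns them into hard predictions:

import numpy as np

# argmax over the class axis picks the higher-scoring of the two output columns
predicted_labels = np.argmax(valid_pos_predictions, axis=1)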
TensorBoard
with tf.name_scope('Inputs'):
    ...
with tf.variable_scope('fc_1'):
    ...
tf.summary.scalar('loss', loss_op)
tf.summary.scalar('acc', acc_op)
# Summary names may not contain spaces (TF would rewrite '1st output' to '1st_output' and warn).
tf.summary.histogram(name='1st_output', values=out)
merged = tf.summary.merge_all()
init = tf.global_variables_initializer()
with tf.Session() as sess:
    # One FileWriter per log directory, so TensorBoard shows them as separate runs.
    writer_valid_pos = tf.summary.FileWriter('TensorBoard/valid_pos/', graph=sess.graph)
    writer_valid_neg = tf.summary.FileWriter('TensorBoard/valid_neg/', graph=sess.graph)
    ...
    for step in range(1, num_steps + 1):
        _, _, summ_valid_pos = sess.run(
            [loss_op, acc_op, merged],
            feed_dict={
                X: valid_pos[:, 0:4000],
                Y: valid_pos[:, 4000:4002],
                is_training: False
            })
        _, _, summ_valid_neg = sess.run(
            [loss_op, acc_op, merged],
            feed_dict={
                X: valid_neg[:, 0:4000],
                Y: valid_neg[:, 4000:4002],
                is_training: False
            })
        writer_valid_pos.add_summary(summ_valid_pos, step)
        writer_valid_neg.add_summary(summ_valid_neg, step)
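To view the curves, point TensorBoard at the parent log directory, e.g. run tensorboard --logdir TensorBoard/ from the command line; the valid_pos and valid_neg writers then appear as separate runs that can be overlaid on the same loss and accuracy charts.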