I can get the training loss at every global step, but I also want to add the evaluation loss to the 'lossxx' graph in TensorBoard. How can I do that?
import tensorflow as tf

class MyHook(tf.train.SessionRunHook):
    # After every training step, run the metric update op that was named 'acc_op' in the graph.
    def after_run(self, run_context, run_values):
        _session = run_context.session
        _session.run(_session.graph.get_operation_by_name('acc_op'))
def my_model(features, labels, mode):
    ...
    logits = tf.layers.dense(net, 3, activation=None)
    predicted_classes = tf.argmax(logits, 1)
    if mode == tf.estimator.ModeKeys.PREDICT:
        predictions = {
            'class': predicted_classes,
            'prob': tf.nn.softmax(logits)
        }
        return tf.estimator.EstimatorSpec(mode, predictions=predictions)

    # Compute loss.
    loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits)
    acc, acc_op = tf.metrics.accuracy(labels=labels, predictions=predicted_classes)
    # Give the update op a name so MyHook can fetch it from the graph.
    tf.identity(acc_op, 'acc_op')
    loss_sum = tf.summary.scalar('lossxx', loss)
    accuracy_sum = tf.summary.scalar('accuracyxx', acc)
    merg = tf.summary.merge_all()

    # Create training op.
    if mode == tf.estimator.ModeKeys.TRAIN:
        optimizer = tf.train.AdagradOptimizer(learning_rate=0.1)
        train_op = optimizer.minimize(loss, global_step=tf.train.get_global_step())
        return tf.estimator.EstimatorSpec(
            mode, loss=loss, train_op=train_op,
            training_chief_hooks=[
                tf.train.SummarySaverHook(save_steps=10, output_dir='./model', summary_op=merg)])

    return tf.estimator.EstimatorSpec(
        mode, loss=loss, eval_metric_ops={'accuracy': (acc, acc_op)}
    )

classifier.train(input_fn=train_input_fn, steps=1000, hooks=[MyHook()])
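For reference, here is a minimal sketch of one common way to get an evaluation loss curve next to the training one, using tf.estimator.train_and_evaluate. It assumes a hypothetical eval_input_fn for the evaluation data; my_model and train_input_fn are the ones above, and the model_dir is assumed to be './model' to match the SummarySaverHook. Evaluation summaries, including the loss, are written to the eval subdirectory of model_dir, so pointing TensorBoard at model_dir shows both runs in one dashboard.

import tensorflow as tf

# Build the estimator from the model_fn above; './model' is an assumed model_dir.
classifier = tf.estimator.Estimator(model_fn=my_model, model_dir='./model')

train_spec = tf.estimator.TrainSpec(input_fn=train_input_fn, max_steps=1000)
eval_spec = tf.estimator.EvalSpec(
    input_fn=eval_input_fn,   # hypothetical evaluation input_fn, not defined above
    steps=None,               # evaluate on the full evaluation set
    throttle_secs=60)         # re-evaluate at most once per minute

# Training summaries go to ./model, evaluation summaries to ./model/eval;
# TensorBoard overlays the two runs when launched with --logdir=./model.
tf.estimator.train_and_evaluate(classifier, train_spec, eval_spec)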

Comment: How do you align the x-axis (global_step) of lossxx with the x-axis from the evaluation? - maddin25