1. Relevant commands
with tf.name_scope("accuracy"): tf.summary.scalar('accuracy', accuracy) # Add the accuracy to the summary merged_summary = tf.summary.merge_all() # Merge all summaries together writer = tf.summary.FileWriter(filewriter_path) # Initialize the FileWriter
writer.add_graph(sess.graph) # Add the model graph to TensorBoard
tf.summary.histogram(var.name, var) # Add the variables we train to the summary
tf.variable_scope(name)  # open (or reuse) a variable scope named `name`
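To show how these calls fit together, here is a minimal, self-contained TF 1.x sketch; the names logdir, x, w and the dummy "accuracy" are placeholders for illustration, not taken from the code above:

import tensorflow as tf

# Minimal sketch (TF 1.x): wire the commands above into a tiny graph.
logdir = '/tmp/filewriter'   # placeholder log directory

with tf.variable_scope("model"):
    x = tf.placeholder(tf.float32, [None, 1], name="x")
    w = tf.get_variable("w", shape=[1], initializer=tf.ones_initializer())
    pred = x * w

with tf.name_scope("accuracy"):
    accuracy = tf.reduce_mean(pred)              # dummy "accuracy" for the demo
    tf.summary.scalar('accuracy', accuracy)      # add the scalar to the summary

tf.summary.histogram(w.op.name, w)               # add the trained variable to the summary

merged_summary = tf.summary.merge_all()          # merge all summaries together
writer = tf.summary.FileWriter(logdir)           # initialize the FileWriter

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    writer.add_graph(sess.graph)                 # add the model graph to TensorBoard
    s = sess.run(merged_summary, feed_dict={x: [[1.0], [2.0]]})
    writer.add_summary(s, global_step=0)         # write one summary record
    writer.close()

Running this produces an events.out.tfevents.* file under logdir, which is exactly what TensorBoard reads in the next section.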
2. TensorBoard log file format and how to open it
The event file has a name of the form: events.out.tfevents.1493946419.wgb
I stored this file in the folder ~/Desktop/model_wgb/finetune_alexnet_with_tensorflow--master/data/filewriter,
so in an Ubuntu terminal I launched TensorBoard with:
wgb@wgb:~/Desktop/model_wgb/finetune_alexnet_with_tensorflow--master/data$ tensorboard --logdir=filewriter/
which prints:
Starting TensorBoard b'41' on port 6006 (You can navigate to http://127.0.1.1:6006)
Copy and paste http://127.0.1.1:6006 into the browser (IE in my case) to open the dashboard.
For reference, see Mofan (莫烦) TensorFlow tutorial videos 14 and 15.
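Besides viewing it in the TensorBoard UI, the events.out.tfevents.* file can also be read programmatically. A small sketch using tf.train.summary_iterator (the path below is just an example; point it at your own event file):

import tensorflow as tf

# Iterate over an event file and print the scalar summaries it contains.
event_file = 'filewriter/events.out.tfevents.1493946419.wgb'

for event in tf.train.summary_iterator(event_file):
    for value in event.summary.value:
        # Scalar summaries (e.g. 'accuracy', 'cross_entropy') carry simple_value
        if value.HasField('simple_value'):
            print(event.step, value.tag, value.simple_value)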
3. Example that generates TensorBoard summaries:
with tf.name_scope("cross_ent"):
loss = tf.reduce_mean(
tf.nn.softmax_cross_entropy_with_logits(logits=score, labels=y))
# Train op
with tf.name_scope("train"):
# Get gradients of all trainable variables
gradients = tf.gradients(loss, var_list) #导数
gradients = list(zip(gradients, var_list))
# Create optimizer and apply gradient descent to the trainable variables
optimizer = tf.train.GradientDescentOptimizer(learning_rate)
train_op = optimizer.apply_gradients(grads_and_vars=gradients)
# Add gradients to summary
for gradient, var in gradients:
tf.summary.histogram(var.name + '/gradient', gradient)
# Add the variables we train to the summary
for var in var_list:
tf.summary.histogram(var.name, var)
# Add the loss to summary
tf.summary.scalar('cross_entropy', loss)
# Evaluation op: Accuracy of the model
with tf.name_scope("accuracy"):
# top 1 accuracy
# correct_pred = tf.equal(tf.argmax(score, 1), tf.argmax(y, 1))
# accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
# TODO top k accuracy
labels = tf.argmax(y, 1) # get real label
topFiver = tf.nn.in_top_k(score, labels, 1) # check if top N is in real label or not
accuracy = tf.reduce_mean(tf.cast(topFiver, tf.float32))
# Add the accuracy to the summary
tf.summary.scalar('accuracy', accuracy)
# Merge all summaries together
merged_summary = tf.summary.merge_all()
# Initialize the FileWriter
writer = tf.summary.FileWriter(filewriter_path)
# Initialize an saver for store model checkpoints
saver = tf.train.Saver()
# Initalize the data generator seperately for the training and validation set
train_generator = ImageDataGenerator(
train_file, horizontal_flip=True, shuffle=True)
val_generator = ImageDataGenerator(val_file, shuffle=False)
# Get the number of training/validation steps per epoch
train_batches_per_epoch = np.floor(train_generator.data_size /
batch_size).astype(np.int16)
val_batches_per_epoch = np.floor(val_generator.data_size /
batch_size).astype(np.int16)
# Start Tensorflow session
with tf.Session() as sess:
# Initialize all variables
sess.run(tf.global_variables_initializer())
# Add the model graph to TensorBoard
writer.add_graph(sess.graph)
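    # --- Sketch (not part of the original excerpt) --------------------------
    # To actually see curves in TensorBoard, merged_summary has to be
    # evaluated inside the training loop and handed to the FileWriter.
    # The placeholder `x`, the constants `num_epochs` / `display_step`, and
    # the generator methods next_batch() / reset_pointer() are assumptions
    # from the surrounding script, not defined in the excerpt above.
    for epoch in range(num_epochs):
        for step in range(train_batches_per_epoch):
            batch_xs, batch_ys = train_generator.next_batch(batch_size)
            sess.run(train_op, feed_dict={x: batch_xs, y: batch_ys})

            if step % display_step == 0:
                # Evaluate all merged summaries on the current batch and write
                # them with a global step so TensorBoard can plot the curves
                s = sess.run(merged_summary, feed_dict={x: batch_xs, y: batch_ys})
                writer.add_summary(s, epoch * train_batches_per_epoch + step)

        train_generator.reset_pointer()  # start the next epoch from the beginning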