10 changes: 5 additions & 5 deletions mnist_cnn_train.py
@@ -49,7 +49,7 @@ def train():
loss = slim.losses.softmax_cross_entropy(y,y_)

# Create a summary to monitor loss tensor
-tf.scalar_summary('loss', loss)
+tf.summary.scalar('loss', loss)

# Define optimizer
with tf.name_scope("ADAM"):
@@ -67,18 +67,18 @@ def train():
train_step = tf.train.AdamOptimizer(learning_rate).minimize(loss,global_step=batch)

# Create a summary to monitor learning_rate tensor
-tf.scalar_summary('learning_rate', learning_rate)
+tf.summary.scalar('learning_rate', learning_rate)

# Get accuracy of model
with tf.name_scope("ACC"):
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

# Create a summary to monitor accuracy tensor
-tf.scalar_summary('acc', accuracy)
+tf.summary.scalar('acc', accuracy)

# Merge all summaries into a single op
-merged_summary_op = tf.merge_all_summaries()
+merged_summary_op = tf.summary.merge_all()

# Add ops to save and restore all the variables
saver = tf.train.Saver()
@@ -89,7 +89,7 @@ def train():
total_batch = int(train_size / batch_size)

# op to write logs to Tensorboard
-summary_writer = tf.train.SummaryWriter(LOGS_DIRECTORY, graph=tf.get_default_graph())
+summary_writer = tf.summary.FileWriter(LOGS_DIRECTORY, graph=tf.get_default_graph())

# Save the maximum accuracy value for validation data
max_acc = 0.
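
For context, every renamed call in this diff moves to the tf.summary namespace used by TensorFlow 1.x: tf.scalar_summary becomes tf.summary.scalar, tf.merge_all_summaries becomes tf.summary.merge_all, and tf.train.SummaryWriter becomes tf.summary.FileWriter. The sketch below is not code from this repository; the placeholder input, dummy loss, and logs/train path are illustrative stand-ins that show how the migrated calls fit together in a graph-mode training loop.

# Minimal sketch of the TF 1.x summary workflow this diff migrates to.
# The placeholder, dummy loss, and log directory are illustrative only.
import tensorflow as tf

LOGS_DIRECTORY = 'logs/train'

x = tf.placeholder(tf.float32, shape=[None, 1], name='x')
loss = tf.reduce_mean(tf.square(x))  # stand-in for the model's loss tensor

# tf.scalar_summary(...)   ->  tf.summary.scalar(...)
tf.summary.scalar('loss', loss)

# tf.merge_all_summaries() ->  tf.summary.merge_all()
merged_summary_op = tf.summary.merge_all()

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())

    # tf.train.SummaryWriter(...) -> tf.summary.FileWriter(...)
    summary_writer = tf.summary.FileWriter(LOGS_DIRECTORY,
                                           graph=tf.get_default_graph())

    for step in range(10):
        # Evaluate the loss and the merged summary op in one run call,
        # then append the serialized summary to the event file.
        _, summary = sess.run([loss, merged_summary_op],
                              feed_dict={x: [[float(step)]]})
        summary_writer.add_summary(summary, global_step=step)

    summary_writer.close()

Pointing TensorBoard at LOGS_DIRECTORY (tensorboard --logdir logs/train) then renders the scalar curves, exactly as the original tf.scalar_summary/tf.train.SummaryWriter pair did before the rename.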