python测试集结果调取问题

test_acc = sess.run(accr,feed_dict=feeds_test)
这个语句是用来取得测试准确率的。请问:如何才能取得测试期间对每一个样本的预测数值?

# --- Graph definition: softmax (multinomial logistic) regression on MNIST ---
x = tf.placeholder("float", [None, 784])  # input pixels; None = any batch size, 784 = 28*28
y = tf.placeholder("float", [None, 10])   # one-hot labels for the 10 digit classes
W = tf.Variable(tf.zeros([784, 10]))      # weights, zero-initialized
b = tf.Variable(tf.zeros([10]))           # biases, zero-initialized

# Model: per-class probabilities via softmax (this is the activation, not the cost).
actv = tf.nn.softmax(tf.matmul(x, W) + b)
# Cost: mean cross-entropy over the batch. Clip the probabilities away from 0
# so tf.log never yields -inf/NaN (log(0) is undefined).
cost = tf.reduce_mean(
    -tf.reduce_sum(y * tf.log(tf.clip_by_value(actv, 1e-10, 1.0)),
                   reduction_indices=1))
learning_rate = 0.01
optm = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)

# Accuracy: fraction of samples whose arg-max prediction matches the label.
pred = tf.equal(tf.argmax(actv, 1), tf.argmax(y, 1))
accr = tf.reduce_mean(tf.cast(pred, "float"))
# Initializer op for all variables defined above.
init = tf.global_variables_initializer()

# --- Training loop ---
training_epochs = 50  # number of full passes over the training set
batch_size = 100      # samples per gradient step
display_step = 5      # report metrics every `display_step` epochs

sess = tf.Session()
sess.run(init)  # run variable initialization

for epoch in range(training_epochs):
    avg_cost = 0.0
    num_batch = int(mnist.train.num_examples / batch_size)
    for i in range(num_batch):
        # Fetch the next mini-batch and take one gradient-descent step.
        batch_xs, batch_ys = mnist.train.next_batch(batch_size)
        sess.run(optm, feed_dict={x: batch_xs, y: batch_ys})
        feeds = {x: batch_xs, y: batch_ys}
        # Accumulate the running mean of the cost over this epoch's batches.
        avg_cost += sess.run(cost, feed_dict=feeds) / num_batch
    # Periodically report train/test accuracy (once per display_step epochs).
    if epoch % display_step == 0:
        feeds_train = {x: batch_xs, y: batch_ys}  # NOTE: last mini-batch only, not the full train set
        feeds_test = {x: mnist.test.images, y: mnist.test.labels}
        train_acc = sess.run(accr, feed_dict=feeds_train)  # feed_dict fills the placeholders
        test_acc = sess.run(accr, feed_dict=feeds_test)
        print("Epoch: %03d/%03d cost: %.9f train_acc: %.3f test_acc: %.3f"
              % (epoch, training_epochs, avg_cost, train_acc, test_acc))

2个回答

简单的看了下,把 test_acc = sess.run(accr, feed_dict=feeds_test) 改成 test_acc, test_prob = sess.run([accr, actv], feed_dict=feeds_test) 即可:test_prob 就是每个测试样本在 10 个类别上的 softmax 预测概率(形状 [样本数, 10]);若只要预测的类别编号,再对它取 argmax(即 sess.run(tf.argmax(actv, 1), feed_dict=feeds_test))。

weixin_41138872
weixin_41138872 我很奇怪为什么 测试的准确度跟训练的准确度是同样的语句?train_acc = sess.run(accr, feed_dict=feeds_train) ,test_acc = sess.run(accr,feed_dict=feeds_test),这个test_acc真的是测试准确度么?还是仅仅是多训练了一遍?
一年多之前 回复

试试其他方法,相信自己肯定可以的

Csdn user default icon
上传中...
上传图片
插入图片
抄袭、复制答案,以达到刷声望分或其他目的的行为,在CSDN问答是严格禁止的,一经发现立刻封号。是时候展现真正的技术了!