pldcat 2020-01-25 18:05 · accept rate: 50%
559 views
Accepted

MNIST deep-learning example does not converge; accuracy stays in the low teens

The MNIST deep-learning program does not converge

This is a TensorFlow question. I am a TensorFlow beginner and copied this MNIST training program from a book, but when I run it the accuracy barely changes no matter how many batches it trains on. I have tried many things: I removed the regularization and the moving average, and it still does not work. I then set batch_size to 2 and watched the variables during training. The input x is correct, but in many cases the network produces exactly the same two output rows y even though the two inputs x differ, and consequently the softmax results are identical as well. I cannot figure out what causes this. I am posting the code and part of the output below; please help me find the cause. It is New Year time, so happy Spring Festival and best wishes to everyone.
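For reference, here is the quick check I have in mind (my own sketch, untested; x, weights1, biases1, xs and sess are the names from the program below). If two different inputs yield identical rows of y, the most likely explanation is that the hidden ReLU layer produces the same activation vector for both samples, e.g. all zeros, in which case y is just biases2 broadcast over the batch:

# Diagnostic sketch (my own, untested; x, weights1, biases1 are the variables
# defined in the program below). Define this op once, next to y:
layer1_nonzero = tf.count_nonzero(tf.nn.relu(tf.matmul(x, weights1) + biases1), axis=1)

# Then run it inside the training loop to count the nonzero hidden activations
# per sample. If this prints 0 for both samples, the ReLU layer is "dead" and y
# degenerates to biases2, which would explain identical y rows for different x.
print(sess.run(layer1_nonzero, feed_dict={x: xs}))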

To add: further testing shows the model is not incapable of training; it just needs over 700,000 steps, and the accuracy tops out around 65% and then stops improving. Looking closely at the per-step values, the regularization term is far too large, above 10e15, and shrinks only a little at a time, so the early training is effectively spent just driving it down. I do not really understand this behavior.
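To put that number in perspective, here is a back-of-envelope check (my own arithmetic, using the initialization constants from the program below). tf.contrib.layers.l2_regularizer(scale)(w) computes scale * sum(w**2) / 2, so at initialization the penalty should be tiny:

import numpy as np

# Rough estimate of the initial L2 penalty (plain normal instead of the
# truncated normal used in the program; that only changes the estimate a little)
w1 = np.random.normal(0.0, 0.1, (784, 500))
w2 = np.random.normal(0.0, 0.1, (500, 10))
penalty = 0.0001 * (np.sum(w1 ** 2) + np.sum(w2 ** 2)) / 2.0
print(penalty)  # ~0.2 at initialization, versus ~5e13 in the log below

So a regularization value above 1e13 means the weights themselves must have grown by several orders of magnitude during training.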

import struct
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider, Button
import tensorflow as tf
import time

#Wrap the MNIST file handling in a class so it is convenient to reuse later.
class MyMinst():

    def decode_idx3_ubyte(self,idx3_ubyte_file):
        with open(idx3_ubyte_file, 'rb') as f:
            print('Parsing file:', idx3_ubyte_file)
            fb_data = f.read()

        offset = 0
        fmt_header = '>iiii'    # read four unsigned int32 values, big-endian
        magic_number, num_images, num_rows, num_cols = struct.unpack_from(fmt_header, fb_data, offset)
        print('idx3 magic number: {}, image count: {}'.format(magic_number, num_images))
        offset += struct.calcsize(fmt_header)
        fmt_image = '>' + str(num_rows * num_cols) + 'B'

        images = np.empty((num_images, num_rows*num_cols)) #modified from the book: one flat row per image
        for i in range(num_images):
            im = struct.unpack_from(fmt_image, fb_data, offset)
            images[i] = np.array(im) #each image is kept as a 1-D array; use np.array(im).reshape((num_rows, num_cols)) for 2-D
            offset += struct.calcsize(fmt_image)
        return images
    def decode_idx1_ubyte(self,idx1_ubyte_file):
        with open(idx1_ubyte_file, 'rb') as f:
            print('Parsing file:', idx1_ubyte_file)
            fb_data = f.read()

        offset = 0
        fmt_header = '>ii'  # read two unsigned int32 values, big-endian
        magic_number, label_num = struct.unpack_from(fmt_header, fb_data, offset)
        print('idx1 magic number: {}, label count: {}'.format(magic_number, label_num))
        offset += struct.calcsize(fmt_header)
        labels = np.empty(shape=[0,10],dtype=float) #the network needs each label as a 10-element float one-hot array

        fmt_label = '>B'    # read one byte at a time
        for i in range(label_num):
            n=struct.unpack_from(fmt_label, fb_data, offset)
            labels=np.append(labels,[[0,0,0,0,0,0,0,0,0,0]],axis=0)
            labels[i][n]=1
            offset += struct.calcsize(fmt_label)
        return labels  

    def __init__(self):
        #fixed locations of the training files
        self.img=self.decode_idx3_ubyte("/home/zhangyl/Downloads/mnist/train-images.idx3-ubyte")
        self.result=self.decode_idx1_ubyte("/home/zhangyl/Downloads/mnist/train-labels.idx1-ubyte")

        print(self.result[0])
        print(self.result[1000])
        print(self.result[25000])
        #fixed locations of the validation files
        self.validate_img=self.decode_idx3_ubyte("/home/zhangyl/Downloads/mnist/t10k-images.idx3-ubyte")
        self.validate_result=self.decode_idx1_ubyte("/home/zhangyl/Downloads/mnist/t10k-labels.idx1-ubyte")
        #start position for the next batch of training data
        self.train_read_addr=0
        #batch size for reading training data
        self.train_batchsize=100
        #start position for the next batch of validation data
        self.validate_read_addr=0
        #batch size for reading validation data
        self.validate_batchsize=100
        #variables used to return batch data
        self.train_img_batch=self.img
        self.train_result_batch=self.result
        self.validate_img_batch=self.validate_img
        self.validate_result_batch=self.validate_result

    def get_next_batch_traindata(self):
        n=len(self.img) #bound the index range
        if self.train_read_addr+self.train_batchsize<=n :
            self.train_img_batch=self.img[self.train_read_addr:self.train_read_addr+self.train_batchsize]
            self.train_result_batch=self.result[self.train_read_addr:self.train_read_addr+self.train_batchsize]
            self.train_read_addr+=self.train_batchsize #advance the read position
            if self.train_read_addr==n :
                self.train_read_addr=0
        else:
            #wrap around to the start of the data; ndarrays have no append(),
            #so np.concatenate is used to join the two slices
            wrap=self.train_read_addr+self.train_batchsize-n
            self.train_img_batch=np.concatenate((self.img[self.train_read_addr:n],self.img[0:wrap]),axis=0)
            self.train_result_batch=np.concatenate((self.result[self.train_read_addr:n],self.result[0:wrap]),axis=0)
            self.train_read_addr=wrap #advance the read position; batchsize>n is not handled here
        return self.train_img_batch,self.train_result_batch #check whether returning via instance attributes works


    def set_train_read_addr(self,addr):
        self.train_read_addr=addr
    def set_train_batchsize(self,batchsize):
        self.train_batchsize=batchsize
        if batchsize <1 :
            self.train_batchsize=1
    def set_validate_read_addr(self,addr):
        self.validate_read_addr=addr
    def set_validate_batchsize(self,batchsize):
        self.validate_batchsize=batchsize
        if batchsize<1 :
            self.validate_batchsize=1

myminst=MyMinst() #instance of the MyMinst class
batch_size=2  #batch size for each training step
learning_rate=0.8 #initial learning rate
learning_rate_decay=0.999 #learning-rate decay
max_steps=300000 #maximum number of training steps

#Define a variable that stores the number of training steps. When training a
#network with TensorFlow, this step counter is usually made non-trainable
#by setting the trainable argument to False.
training_step = tf.Variable(0,trainable=False)

#Forward pass producing the hidden and output layers; the activation function is relu()
def hidden_layer(input_tensor,weights1,biases1,weights2,biases2,layer_name):
    layer1=tf.nn.relu(tf.matmul(input_tensor,weights1)+biases1)
    return tf.matmul(layer1,weights2)+biases2

x=tf.placeholder(tf.float32,[None,784],name="x-input")
y_=tf.placeholder(tf.float32,[None,10],name="y-output")

#Hidden-layer parameters; weights1 holds 784*500=392,000 of them
weights1=tf.Variable(tf.truncated_normal([784,500],stddev=0.1))
biases1=tf.Variable(tf.constant(0.1,shape=[500]))

#Output-layer parameters; weights2 holds 500*10=5,000 of them
weights2=tf.Variable(tf.truncated_normal([500,10],stddev=0.1))
biases2=tf.Variable(tf.constant(0.1,shape=[10]))

#Compute y with a forward pass through the network
y=hidden_layer(x,weights1,biases1,weights2,biases2,'y')

#Initialize a moving-average class with decay rate 0.99.
#The num_updates argument, set here to the current training step, lets the
#model update faster early in training.
#averages_class=tf.train.ExponentialMovingAverage(0.99,training_step)

#Defining an op that updates the moving averages requires passing a list of
#variables to the class's apply() function.
#trainable_variables() returns the elements of the GraphKeys.TRAINABLE_VARIABLES
#collection, i.e. every variable created without trainable=False.
#averages_op=averages_class.apply(tf.trainable_variables())

#Compute y again with a forward pass, this time using the moving averages.
#Remember that a moving-average value is only a shadow variable.
#average_y=hidden_layer(x,averages_class.average(weights1),
#                       averages_class.average(biases1),
#                       averages_class.average(weights2),
#                       averages_class.average(biases2),
#                       'average_y')

#softmax, cross-entropy loss, L2 regularization, gradient-descent optimizer,
#exponentially decaying learning rate

#The prototype is sparse_softmax_cross_entropy_with_logits(_sentinel,labels,logits,name).
#It computes the same quantity as softmax_cross_entropy_with_logits() but is
#suited to the case where the classes are mutually exclusive, i.e. each image
#belongs to exactly one class.
#In TensorFlow 1.0.0 this function can only be called with named arguments:
#logits is the network's forward-pass result without the softmax layer, and
#labels gives the correct answers for the training data.
softmax=tf.nn.softmax(y)
cross_entropy=tf.nn.sparse_softmax_cross_entropy_with_logits(logits=y+1e-10,labels=tf.argmax(y_,1))
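#Note: tf.argmax(y_,1) converts the one-hot labels built in MyMinst back into
#class indices, which is the label format the sparse variant expects.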

#argmax(input,axis,name,dimension) computes the predicted answer for each
#example. The input y is a batch_size*10 two-dimensional array (batch_size
#rows, 10 columns) whose rows are forward-pass results; axis=1 means the max
#is taken within each row, so the result is a length-batch_size vector whose
#entries are the recognized digits.

regularizer=tf.contrib.layers.l2_regularizer(0.0001)
                        #the L2 regularization loss function
regularization=regularizer(weights1)+regularizer(weights2)
                        #the model's regularization loss
loss=tf.reduce_mean(cross_entropy)#+regularization
                        #total loss (the regularization term is commented out)

#Set the learning rate with exponential decay; staircase keeps its default value False, so the rate decays continuously
learning_rate=tf.train.exponential_decay(learning_rate,training_step,
                                        batch_size,learning_rate_decay)
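#Note: exponential_decay() computes
#learning_rate*learning_rate_decay^(training_step/decay_steps); decay_steps is
#passed as batch_size here, so the rate decays every batch_size steps rather
#than once per pass over the data.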

#Use the GradientDescentOptimizer algorithm to minimize the cross-entropy and regularization losses
train_op=tf.train.GradientDescentOptimizer(learning_rate).minimize(loss,
                                                             global_step=training_step)

#When training this model, each pass over the data must both update the network
#parameters via backpropagation and update each parameter's moving average.
#control_dependencies() is used to group such operations into one step.
#The same thing can be done with this line:
#train_op=tf.group(train_step,averages_op)
#with tf.control_dependencies([train_step,averages_op]):
#             train_op=tf.no_op(name="train")

#Check whether the forward-pass result of the network (using the moving-average
#model) is correct. equal(x,y,name) compares two tensors element-wise and
#returns True where they are equal, False otherwise.
correct_prediction=tf.equal(tf.argmax(y,1),tf.argmax(y_,1))

#cast(x,DstT,name) converts the boolean tensor to float32; the mean of the
#resulting float32 values is the model's accuracy on this batch of data.
accuracy=tf.reduce_mean(tf.cast(correct_prediction,tf.float32))

#Create the session and run the training loop
with tf.Session() as sess:
    #Earlier versions generally used initialize_all_variables() to initialize all variables
    tf.global_variables_initializer().run()

    #prepare the validation data
    validate_feed={x:myminst.validate_img,y_:myminst.validate_result}
    #prepare the test data (note that this feed reuses the training set)
    test_feed= {x:myminst.img,y_:myminst.result}

    for i in range(max_steps):
        if i%1000==0:
            #evaluate the moving-average model on the validation data;
            #validate_accuracy is multiplied by 100 to print a percentage
            validate_accuracy= sess.run(accuracy,feed_dict=validate_feed)
            print("After %d training steps,validation accuracy using average model is %g%%" %(i,validate_accuracy*100))

        #Produce the batch of training data for this step and train on it.
        #The class generated by input_data.read_data_sets() provides a
        #train.next_batch() function whose batch_size argument reads a small
        #slice of all the training data as one batch.
        myminst.set_train_batchsize(batch_size)
        xs,ys=myminst.get_next_batch_traindata()
        var_print=sess.run([x,y,y_,loss,train_op,softmax,cross_entropy,regularization,weights1],feed_dict={x:xs,y_:ys})
        print("after ",i," trainning steps:")
        print("x=",var_print[0][0],var_print[0][1],"y=",var_print[1],"y_=",var_print[2],"loss=",var_print[3],
"softmax=",var_print[5],"cross_entropy=",var_print[6],"regularization=",var_print[7],var_print[7])
        time.sleep(0.5)    

    #Check the network's post-training accuracy on the test feed;
    #test_accuracy is multiplied by 100 to print a percentage
    test_accuracy=sess.run(accuracy,feed_dict=test_feed)
    print("After %d training steps,test accuracy using average model is %g%%"%(max_steps,test_accuracy*100))

Here is part of the output (the two 784-pixel x arrays printed at each step are elided):
x= [784 pixel values of sample 1 omitted] [784 pixel values of sample 2 omitted] y= [[ 0.58273095 0.50121385 -0.74845004 0.35842288 -0.13741069 -0.5839622
  0.2642774  0.5101677 -0.29416046 0.5471707 ]
 [ 0.58273095 0.50121385 -0.74845004 0.35842288 -0.13741069 -0.5839622
  0.2642774  0.5101677 -0.29416046 0.5471707 ]] y_= [[1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
 [0. 0. 0. 0. 1. 0. 0. 0. 0. 0.]] loss= 2.2801425 softmax= [[0.14659645 0.13512042 0.03872566 0.11714067 0.07134604 0.04564939
 0.10661562 0.13633572 0.06099501 0.14147504]
 [0.14659645 0.13512042 0.03872566 0.11714067 0.07134604 0.04564939
 0.10661562 0.13633572 0.06099501 0.14147504]] cross_entropy= [1.9200717 2.6402135] regularization= 50459690000000.0 50459690000000.0
after 45 training steps:
x= [784 pixel values of sample 1 omitted] [784 pixel values of sample 2 omitted] y= [[ 0.8592988  0.3954708 -0.77875614 0.26675048 0.19804694 -0.61968666
  0.18084174 0.4034736 -0.34189415 0.43645462]
 [ 0.8592988  0.3954708 -0.77875614 0.26675048 0.19804694 -0.61968666
  0.18084174 0.4034736 -0.34189415 0.43645462]] y_= [[0. 0. 0. 0. 0. 0. 1. 0. 0. 0.]
 [0. 0. 0. 0. 0. 0. 0. 1. 0. 0.]] loss= 2.2191708 softmax= [[0.19166051 0.12052987 0.0372507 0.10597225 0.09893605 0.04367344
 0.09724841 0.12149832 0.05765821 0.12557226]
 [0.19166051 0.12052987 0.0372507 0.10597225 0.09893605 0.04367344
 0.09724841 0.12149832 0.05765821 0.12557226]] cross_entropy= [2.3304868 2.1078548] regularization= 50459690000000.0 50459690000000.0
after 46 training steps:
x= [784 pixel values of sample 1 omitted] [784 pixel values of sample 2 omitted] y= [[ 0.7093834  0.30119324 -0.80789334 0.1838598  0.12065991 -0.6538477
  0.49587095 0.6995347 -0.38699397 0.33823296]
 [ 0.7093834  0.30119324 -0.80789334 0.1838598  0.12065991 -0.6538477
  0.49587095 0.6995347 -0.38699397 0.33823296]] y_= [[0. 0. 0. 0. 1. 0. 0. 0. 0. 0.]
 [0. 0. 0. 0. 0. 0. 1. 0. 0. 0.]] loss= 2.2107558 softmax= [[0.16371341 0.10884525 0.03590371 0.09679484 0.09086671 0.04188326
 0.1322382 0.16210894 0.05469323 0.11295244]
 [0.16371341 0.10884525 0.03590371 0.09679484 0.09086671 0.04188326
 0.1322382 0.16210894 0.05469323 0.11295244]] cross_entropy= [2.3983614 2.0231504] regularization= 50459690000000.0 50459690000000.0
after 47 training steps:
x= [784 pixel values of sample 1 omitted] [784 pixel values of sample 2 omitted] y= [[ 0.5813921  0.21609789 -0.8359629  0.10818548 0.44052082 -0.6865921
  0.78338754 0.5727978 -0.4297532  0.24992661]
 [ 0.5813921  0.21609789 -0.8359629  0.10818548 0.44052082 -0.6865921
  0.78338754 0.5727978 -0.4297532  0.24992661]] y_= [[0. 0. 0. 0. 0. 0. 0. 0. 1. 0.]
 [1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]] loss= 2.452383 softmax= [[0.14272858 0.09905256 0.03459087 0.08892009 0.1239742 0.04016358
 0.1746773 0.14150718 0.05192496 0.10246069]
 [0.14272858 0.09905256 0.03459087 0.08892009 0.1239742 0.04016358
 0.1746773 0.14150718 0.05192496 0.10246069]] cross_entropy= [2.9579558 1.9468105] regularization= 50459690000000.0 50459690000000.0
Terminated