模型训练时,在优化器执行 minimize 的那一步抛出了 AttributeError: 'numpy.dtype' object has no attribute 'is_floating',完整的报错信息如下:
<ipython-input-1-12256e71d6bc> in <module>
260 print('\nPCC training')
261 start = time.time()
--> 262 source_acc, target_acc = train_and_evaluate('pcc')
263 sio.savemat('PCC1.mat',{'a':target_y})
264 end = time.time()
<ipython-input-1-12256e71d6bc> in train_and_evaluate(training_mode, num_steps, verbose)
221 y = np.vstack([y0, y1])
222 pred_loss, coral_loss, cycle_loss, total_loss, classify_labels, pred= loss_function(Fmodel,X,y,True)
--> 223 pcc_train_op = tf.train.MomentumOptimizer(learning_rate, 0.9).minimize(lambda:total_loss)
224 #Evaluation
225 correct_label_pred = tf.equal(tf.argmax(classify_labels, 1),tf.argmax(pred_labels,1))
~/anaconda3/envs/alex/lib/python3.6/site-packages/tensorflow/python/training/optimizer.py in minimize(self, loss, global_step, var_list, gate_gradients, aggregation_method, colocate_gradients_with_ops, name, grad_loss)
401 aggregation_method=aggregation_method,
402 colocate_gradients_with_ops=colocate_gradients_with_ops,
--> 403 grad_loss=grad_loss)
404
405 vars_with_grad = [v for g, v in grads_and_vars if g is not None]
~/anaconda3/envs/alex/lib/python3.6/site-packages/tensorflow/python/training/optimizer.py in compute_gradients(self, loss, var_list, gate_gradients, aggregation_method, colocate_gradients_with_ops, grad_loss)
473 # to be executed.
474 with ops.control_dependencies([loss_value]):
--> 475 grads = tape.gradient(loss_value, var_list, grad_loss)
476 return list(zip(grads, var_list))
477
~/anaconda3/envs/alex/lib/python3.6/site-packages/tensorflow/python/eager/backprop.py in gradient(self, target, sources, output_gradients, unconnected_gradients)
948 flat_targets = []
949 for t in nest.flatten(target):
--> 950 if not t.dtype.is_floating:
951 logging.vlog(
952 logging.WARN, "The dtype of the target tensor must be "
AttributeError: 'numpy.dtype' object has no attribute 'is_floating'
这个错误一直没能解决。从报错位置看,传给 minimize 的 lambda 返回的 total_loss 似乎已经是 NumPy 数值而不是 tf.Tensor,导致 tape.gradient 读取 dtype 时失败。请问应该如何修改才能让优化器正常求梯度?