from keras import layers
from keras import models
# Build a small convnet for 28x28 single-channel (grayscale) MNIST digits:
# three conv/pool stages followed by a small Dense classifier head.
model = models.Sequential([
    layers.Conv2D(32, (3, 3), activation='relu', input_shape=(28, 28, 1)),
    layers.MaxPooling2D((2, 2)),
    layers.Conv2D(64, (3, 3), activation='relu'),
    layers.MaxPooling2D((2, 2)),
    layers.Conv2D(64, (3, 3), activation='relu'),
    # Dense classifiers consume 1D vectors, but the conv stack emits a 3D
    # feature map -- flatten it before the Dense layers.
    layers.Flatten(),
    layers.Dense(64, activation='relu'),
    # 10 output units with softmax: one probability per digit class.
    layers.Dense(10, activation='softmax'),
])
from keras.datasets import mnist
# FIX: `np_utils` was removed from `keras.utils` in TF2-era Keras;
# import `to_categorical` directly (available in all recent versions).
from keras.utils import to_categorical

# Load MNIST: 60k training and 10k test 28x28 grayscale images.
(train_images, train_labels), (test_images, test_labels) = mnist.load_data()

# Add the trailing channel axis that Conv2D expects and rescale pixel
# values from integers in [0, 255] to floats in [0, 1].
train_images = train_images.reshape((60000, 28, 28, 1))
train_images = train_images.astype('float32') / 255
test_images = test_images.reshape((10000, 28, 28, 1))
test_images = test_images.astype('float32') / 255

# One-hot encode the integer class labels to match the 10-unit softmax
# output and the categorical_crossentropy loss used at compile time.
train_labels = to_categorical(train_labels)
test_labels = to_categorical(test_labels)
# 前面这些没问题,后面这段就报错。(The code above runs fine; the section below raises an error.)
# Train with RMSprop; categorical_crossentropy matches the one-hot labels
# produced by to_categorical and the 10-way softmax output layer.
model.compile(optimizer='rmsprop',loss='categorical_crossentropy', metrics=['accuracy'])
# NOTE(review): the pasted InternalError ("stream did not block host until
# done; was already in an error state") is raised inside TensorFlow's GPU
# runtime during fit(), not by this Python code. It typically indicates a
# CUDA/cuDNN/driver version mismatch with the installed TensorFlow build,
# or the GPU running out of memory -- verify the environment's CUDA/cuDNN
# versions, try enabling GPU memory growth, or run on CPU to confirm.
model.fit(train_images, train_labels, epochs=5, batch_size=64)
# 报错 (error traceback follows):
---------------------------------------------------------------------------
InternalError Traceback (most recent call last)
C:\Users\LOISLU~1\AppData\Local\Temp/ipykernel_12408/247866327.py in <module>
1 model.compile(optimizer='rmsprop',loss='categorical_crossentropy', metrics=['accuracy'])
----> 2 model.fit(train_images, train_labels, epochs=5, batch_size=64)
D:\QLDownload\AnacondaDon\envs\tensorflow_gpu2021\lib\site-packages\keras\engine\training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
1106 training_utils.RespectCompiledTrainableState(self):
1107 # Creates a `tf.data.Dataset` and handles batch and epoch iteration.
-> 1108 data_handler = data_adapter.get_data_handler(
1109 x=x,
1110 y=y,
D:\QLDownload\AnacondaDon\envs\tensorflow_gpu2021\lib\site-packages\keras\engine\data_adapter.py in get_data_handler(*args, **kwargs)
1346 if getattr(kwargs["model"], "_cluster_coordinator", None):
1347 return _ClusterCoordinatorDataHandler(*args, **kwargs)
-> 1348 return DataHandler(*args, **kwargs)
1349
1350
D:\QLDownload\AnacondaDon\envs\tensorflow_gpu2021\lib\site-packages\keras\engine\data_adapter.py in __init__(self, x, y, sample_weight, batch_size, steps_per_epoch, initial_epoch, epochs, shuffle, class_weight, max_queue_size, workers, use_multiprocessing, model, steps_per_execution, distribute)
1132 else:
1133 self._steps_per_execution = steps_per_execution
-> 1134 self._steps_per_execution_value = steps_per_execution.numpy().item()
1135
1136 adapter_cls = select_data_adapter(x, y)
D:\QLDownload\AnacondaDon\envs\tensorflow_gpu2021\lib\site-packages\tensorflow\python\ops\resource_variable_ops.py in numpy(self)
626 def numpy(self):
627 if context.executing_eagerly():
--> 628 return self.read_value().numpy()
629 raise NotImplementedError(
630 "numpy() is only available when eager execution is enabled.")
D:\QLDownload\AnacondaDon\envs\tensorflow_gpu2021\lib\site-packages\tensorflow\python\framework\ops.py in numpy(self)
1092 """
1093 # TODO(slebedev): Consider avoiding a copy for non-CPU or remote tensors.
-> 1094 maybe_arr = self._numpy() # pylint: disable=protected-access
1095 return maybe_arr.copy() if isinstance(maybe_arr, np.ndarray) else maybe_arr
1096
D:\QLDownload\AnacondaDon\envs\tensorflow_gpu2021\lib\site-packages\tensorflow\python\framework\ops.py in _numpy(self)
1060 return self._numpy_internal()
1061 except core._NotOkStatusException as e: # pylint: disable=protected-access
-> 1062 six.raise_from(core._status_to_exception(e.code, e.message), None) # pylint: disable=protected-access
1063
1064 @property
D:\QLDownload\AnacondaDon\envs\tensorflow_gpu2021\lib\site-packages\six.py in raise_from(value, from_value)
InternalError: stream did not block host until done; was already in an error state