检查输入时出错:预期 flatten_1_input 有 3 个维度,但得到形状为 (28, 28) 的数组
Error when checking input: expected flatten_1_input to have 3 dimensions, but got array with shape (28, 28)
这是代码:
# Load the image from disk; cv2.imread returns a BGR pixel array (or None if the file is missing).
image = cv2.imread('MNIST_IMAGE.png')
# Convert BGR -> single-channel grayscale; for this MNIST image the result is a (28, 28) array.
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
data = asarray(gray)
# Scale pixel values from [0, 255] into [0.0, 1.0] — presumably to match the
# preprocessing used when the model was trained.
data=data/255.0
# NOTE(review): `data` is rank-2 (28, 28) but the model expects a leading batch
# axis — this call raises the ValueError shown below; the fix is given at the end.
predictions=model.predict(data)
这是我得到的错误:
ValueError Traceback (most recent call
last) in
3 data = asarray(gray)
4 data=data/255.0
----> 5 predictions=model.predict(data)
~\Anaconda3\lib\site-packages\tensorflow_core\python\keras\engine\training.py
in predict(self, x, batch_size, verbose, steps, callbacks,
max_queue_size, workers, use_multiprocessing) 1011
max_queue_size=max_queue_size, 1012 workers=workers,
-> 1013 use_multiprocessing=use_multiprocessing) 1014 1015 def reset_metrics(self):
~\Anaconda3\lib\site-packages\tensorflow_core\python\keras\engine\training_v2.py
in predict(self, model, x, batch_size, verbose, steps, callbacks,
max_queue_size, workers, use_multiprocessing, **kwargs)
496 model, ModeKeys.PREDICT, x=x, batch_size=batch_size, verbose=verbose,
497 steps=steps, callbacks=callbacks, max_queue_size=max_queue_size,
--> 498 workers=workers, use_multiprocessing=use_multiprocessing, **kwargs)
499
500
~\Anaconda3\lib\site-packages\tensorflow_core\python\keras\engine\training_v2.py
in _model_iteration(self, model, mode, x, y, batch_size, verbose,
sample_weight, steps, callbacks, max_queue_size, workers,
use_multiprocessing, **kwargs)
424 max_queue_size=max_queue_size,
425 workers=workers,
--> 426 use_multiprocessing=use_multiprocessing)
427 total_samples = _get_total_number_of_samples(adapter)
428 use_sample = total_samples is not None
~\Anaconda3\lib\site-packages\tensorflow_core\python\keras\engine\training_v2.py
in _process_inputs(model, mode, x, y, batch_size, epochs,
sample_weights, class_weights, shuffle, steps, distribution_strategy,
max_queue_size, workers, use_multiprocessing)
644 standardize_function = None
645 x, y, sample_weights = standardize(
--> 646 x, y, sample_weight=sample_weights)
647 elif adapter_cls is data_adapter.ListsOfScalarsDataAdapter:
648 standardize_function = standardize
~\Anaconda3\lib\site-packages\tensorflow_core\python\keras\engine\training.py
in _standardize_user_data(self, x, y, sample_weight, class_weight,
batch_size, check_steps, steps_name, steps, validation_split, shuffle,
extract_tensors_from_dataset) 2381 is_dataset=is_dataset,
2382 class_weight=class_weight,
-> 2383 batch_size=batch_size) 2384 2385 def _standardize_tensors(self, x, y, sample_weight, run_eagerly, dict_inputs,
~\Anaconda3\lib\site-packages\tensorflow_core\python\keras\engine\training.py
in _standardize_tensors(self, x, y, sample_weight, run_eagerly,
dict_inputs, is_dataset, class_weight, batch_size) 2408
feed_input_shapes, 2409 check_batch_axis=False, # Don't
enforce the batch size.
-> 2410 exception_prefix='input') 2411 2412 # Get typespecs for the input data and sanitize it if necessary.
~\Anaconda3\lib\site-packages\tensorflow_core\python\keras\engine\training_utils.py
in standardize_input_data(data, names, shapes, check_batch_axis,
exception_prefix)
571 ': expected ' + names[i] + ' to have ' +
572 str(len(shape)) + ' dimensions, but got array '
--> 573 'with shape ' + str(data_shape))
574 if not check_batch_axis:
575 data_shape = data_shape[1:]
ValueError: Error when checking input: expected flatten_1_input to
have 3 dimensions, but got array with shape (28, 28)
添加批量维度:
predictions = model.predict(data[None, ...])
或者像这样(两者等价):
predictions = model.predict(np.expand_dims(data, 0))
这是代码:
# Read the PNG from disk as a BGR array (cv2.imread yields None when the path is wrong).
image = cv2.imread('MNIST_IMAGE.png')
# Collapse the three BGR channels to grayscale, producing a (28, 28) array here.
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
data = asarray(gray)
# Normalize pixels to [0.0, 1.0]; assumes the model was trained on /255-scaled input — TODO confirm.
data=data/255.0
# NOTE(review): the array is 2-D (28, 28) while the model wants 3 dimensions
# (a batch axis) — this is the line that triggers the ValueError below.
predictions=model.predict(data)
这是我得到的错误:
ValueError Traceback (most recent call last) in 3 data = asarray(gray) 4 data=data/255.0 ----> 5 predictions=model.predict(data)
~\Anaconda3\lib\site-packages\tensorflow_core\python\keras\engine\training.py in predict(self, x, batch_size, verbose, steps, callbacks, max_queue_size, workers, use_multiprocessing) 1011
max_queue_size=max_queue_size, 1012 workers=workers, -> 1013 use_multiprocessing=use_multiprocessing) 1014 1015 def reset_metrics(self):~\Anaconda3\lib\site-packages\tensorflow_core\python\keras\engine\training_v2.py in predict(self, model, x, batch_size, verbose, steps, callbacks, max_queue_size, workers, use_multiprocessing, **kwargs) 496 model, ModeKeys.PREDICT, x=x, batch_size=batch_size, verbose=verbose, 497 steps=steps, callbacks=callbacks, max_queue_size=max_queue_size, --> 498 workers=workers, use_multiprocessing=use_multiprocessing, **kwargs) 499 500
~\Anaconda3\lib\site-packages\tensorflow_core\python\keras\engine\training_v2.py in _model_iteration(self, model, mode, x, y, batch_size, verbose, sample_weight, steps, callbacks, max_queue_size, workers, use_multiprocessing, **kwargs) 424 max_queue_size=max_queue_size, 425 workers=workers, --> 426 use_multiprocessing=use_multiprocessing) 427 total_samples = _get_total_number_of_samples(adapter) 428 use_sample = total_samples is not None
~\Anaconda3\lib\site-packages\tensorflow_core\python\keras\engine\training_v2.py in _process_inputs(model, mode, x, y, batch_size, epochs, sample_weights, class_weights, shuffle, steps, distribution_strategy, max_queue_size, workers, use_multiprocessing) 644 standardize_function = None 645 x, y, sample_weights = standardize( --> 646 x, y, sample_weight=sample_weights) 647 elif adapter_cls is data_adapter.ListsOfScalarsDataAdapter: 648 standardize_function = standardize
~\Anaconda3\lib\site-packages\tensorflow_core\python\keras\engine\training.py in _standardize_user_data(self, x, y, sample_weight, class_weight, batch_size, check_steps, steps_name, steps, validation_split, shuffle, extract_tensors_from_dataset) 2381 is_dataset=is_dataset,
2382 class_weight=class_weight, -> 2383 batch_size=batch_size) 2384 2385 def _standardize_tensors(self, x, y, sample_weight, run_eagerly, dict_inputs,~\Anaconda3\lib\site-packages\tensorflow_core\python\keras\engine\training.py in _standardize_tensors(self, x, y, sample_weight, run_eagerly, dict_inputs, is_dataset, class_weight, batch_size) 2408
feed_input_shapes, 2409 check_batch_axis=False, # Don't enforce the batch size. -> 2410 exception_prefix='input') 2411 2412 # Get typespecs for the input data and sanitize it if necessary.~\Anaconda3\lib\site-packages\tensorflow_core\python\keras\engine\training_utils.py in standardize_input_data(data, names, shapes, check_batch_axis, exception_prefix) 571 ': expected ' + names[i] + ' to have ' + 572 str(len(shape)) + ' dimensions, but got array ' --> 573 'with shape ' + str(data_shape)) 574 if not check_batch_axis: 575 data_shape = data_shape[1:]
ValueError: Error when checking input: expected flatten_1_input to have 3 dimensions, but got array with shape (28, 28)
添加批量维度:
predictions = model.predict(data[None, ...])
或者像这样(两者等价):
predictions = model.predict(np.expand_dims(data, 0))