ValueError: Shapes (None, None) and (None, 28, 28, 12) are incompatible

I am working on an image dataset classified into 12 classes, using transfer learning with VGG16. However, I am running into the error: Shapes (None, None) and (None, 28, 28, 12) are incompatible. My code:

IMAGE_SHAPE = (224, 224)
BATCH_SIZE = 32

train = ImageDataGenerator()
train_generator = tf.keras.preprocessing.image.ImageDataGenerator(rescale= 1./255, fill_mode= 'nearest')
train_data = train_generator.flow_from_directory(directory="path",target_size=IMAGE_SHAPE , color_mode="rgb" , class_mode='categorical', batch_size=BATCH_SIZE , shuffle = True )

valid = ImageDataGenerator()
validation_generator = tf.keras.preprocessing.image.ImageDataGenerator(rescale=1./255)
valid_data = validation_generator.flow_from_directory(directory="path", target_size=IMAGE_SHAPE , color_mode="rgb" , class_mode='categorical' , batch_size=BATCH_SIZE , shuffle = True )

test = ImageDataGenerator()
test_generator = tf.keras.preprocessing.image.ImageDataGenerator(rescale=1./255)
test_data = test_generator.flow_from_directory(directory='path',target_size=IMAGE_SHAPE , color_mode="rgb" , class_mode='categorical' , batch_size=1 , shuffle = False )
test_data.reset()

from keras.applications.vgg19 import VGG19
vggmodel = VGG19(weights='imagenet', include_top=True)

for layers in (vggmodel.layers)[:32]:
    print(layers)
    layers.trainable = False

import tensorflow as tf
tf.keras.Model
tf.keras.layers.Dense
from keras import optimizers
tf.keras.preprocessing.image.ImageDataGenerator

tf.keras.preprocessing.image.load_img
X= vggmodel.layers[-12].output
flatten = tf.keras.layers.Flatten()(X)
out = vggmodel.layers(flatten)
predictions = tf.keras.layers.Dense(num_classes, activation='softmax')
model_final = Model(vggmodel.input, predictions)
model_final.compile(optimizer = optimizers.Adam(lr=0.0003), loss='categorical_crossentropy', metrics=["accuracy"])

for image_batch, labels_batch in train_data:
  print(image_batch.shape)
  print(labels_batch.shape)
  break

from keras.callbacks import ModelCheckpoint, EarlyStopping
checkpoint = ModelCheckpoint("vgg16_1.h5", monitor='val_acc', verbose=1, save_best_only=True, save_weights_only=False, mode='auto', period=1)
early = EarlyStopping(monitor='val_acc',patience=40, verbose=1, mode='auto')
model_final.fit_generator(generator= train_data, steps_per_epoch= 2, epochs= 100, validation_data= valid_data, validation_steps=1, callbacks=[checkpoint,early])
model_final.save_weights("vgg16_1.h5")

Error: ValueError: Shapes (None, None) and (None, 28, 28, 12) are incompatible

Error details:

ValueError                                Traceback (most recent call last)
<ipython-input-39-938295cc34c4> in <module>()
      2 checkpoint = ModelCheckpoint("vgg16_1.h5", monitor='val_acc', verbose=1, save_best_only=True, save_weights_only=False, mode='auto', period=1)
      3 early = EarlyStopping(monitor='val_acc', min_delta=0, patience=40, verbose=1, mode='auto')
----> 4 model_final.fit_generator(generator= train_images , steps_per_epoch= 2, epochs= 100, validation_data= val_images , validation_steps=1, callbacks=[checkpoint,early])
      5 model_final.save_weights("vgg16_1.h5")

10 frames
/usr/local/lib/python3.7/dist-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
    984           except Exception as e:  # pylint:disable=broad-except
    985             if hasattr(e, "ag_error_metadata"):
--> 986               raise e.ag_error_metadata.to_exception(e)
    987             else:
    988               raise

ValueError: in user code:

    /usr/local/lib/python3.7/dist-packages/keras/engine/training.py:830 train_function  *
        return step_function(self, iterator)
    /usr/local/lib/python3.7/dist-packages/keras/engine/training.py:813 run_step  *
        outputs = model.train_step(data)
    /usr/local/lib/python3.7/dist-packages/keras/engine/training.py:771 train_step  *
        loss = self.compiled_loss(
    /usr/local/lib/python3.7/dist-packages/keras/engine/compile_utils.py:201 __call__  *
        loss_value = loss_obj(y_t, y_p, sample_weight=sw)
    /usr/local/lib/python3.7/dist-packages/keras/losses.py:142 __call__  *
        losses = call_fn(y_true, y_pred)
    /usr/local/lib/python3.7/dist-packages/keras/losses.py:246 call  *
        return ag_fn(y_true, y_pred, **self._fn_kwargs)
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/util/dispatch.py:206 wrapper  **
        return target(*args, **kwargs)
    /usr/local/lib/python3.7/dist-packages/keras/losses.py:1631 categorical_crossentropy
        y_true, y_pred, from_logits=from_logits)
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/util/dispatch.py:206 wrapper
        return target(*args, **kwargs)
    /usr/local/lib/python3.7/dist-packages/keras/backend.py:4827 categorical_crossentropy
        target.shape.assert_is_compatible_with(output.shape)
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/framework/tensor_shape.py:1161 assert_is_compatible_with
        raise ValueError("Shapes %s and %s are incompatible" % (self, other))

    ValueError: Shapes (None, None) and (None, 28, 28, 12) are incompatible


Updated code:

image_dir = Path('../content/dataset')

# Get filepaths and labels
filepaths = list(image_dir.glob(r'**/*.png'))
labels = list(map(lambda x: os.path.split(os.path.split(x)[0])[1], filepaths))

def create_gen():
    # Load the Images with a generator and Data Augmentation
    train_generator = tf.keras.preprocessing.image.ImageDataGenerator(
        preprocessing_function=tf.keras.applications.vgg16.preprocess_input,
        validation_split=0.1
    )

    test_generator = tf.keras.preprocessing.image.ImageDataGenerator(
        preprocessing_function=tf.keras.applications.vgg16.preprocess_input
    )

    train_images = train_generator.flow_from_dataframe(
        dataframe=train_df,
        x_col='Filepath',
        y_col='Label',
        target_size=(224, 224),
        color_mode='rgb',
        class_mode='categorical',
        batch_size=32,
        shuffle=True,
        seed=0,
        subset='training',
        rotation_range=30, # Uncomment to use data augmentation
        zoom_range=0.15,
        width_shift_range=0.2,
        height_shift_range=0.2,
        shear_range=0.15,
        horizontal_flip=True,
        fill_mode="nearest"
    )

    val_images = train_generator.flow_from_dataframe(
        dataframe=train_df,
        x_col='Filepath',
        y_col='Label',
        target_size=(224, 224),
        color_mode='rgb',
        class_mode='categorical',
        batch_size=32,
        shuffle=True,
        seed=0,
        subset='validation',
        rotation_range=30, # Uncomment to use data augmentation
        zoom_range=0.15,
        width_shift_range=0.2,
        height_shift_range=0.2,
        shear_range=0.15,
        horizontal_flip=True,
        fill_mode="nearest"
    )

    test_images = test_generator.flow_from_dataframe(
        dataframe=test_df,
        x_col='Filepath',
        y_col='Label',
        target_size=(224, 224),
        color_mode='rgb',
        class_mode='categorical',
        batch_size=32,
        shuffle=False
    )
    
    return train_generator,test_generator,train_images,val_images,test_images

from keras.applications.vgg16 import VGG16
vggmodel = VGG16(weights='imagenet', include_top=True)

for layers in (vggmodel.layers)[:256]:
    print(layers)
    layers.trainable = False

X= vggmodel.layers[-12].output
predictions = Dense(12, activation="softmax")(X)
model_final = Model(vggmodel.input, predictions)

model_final.compile(optimizer = optimizers.Adam(lr=0.0003), loss='categorical_crossentropy', metrics=["accuracy"])

# Separate in train and test data
train_df, test_df = train_test_split(image_df, train_size=0.9, shuffle=True, random_state=1)

# Create the generators
train_generator,test_generator,train_images,val_images,test_images = create_gen()

from keras.callbacks import ModelCheckpoint, EarlyStopping
checkpoint = ModelCheckpoint("vgg16_1.h5", monitor='val_acc', verbose=1, save_best_only=True, save_weights_only=False, mode='auto', period=1)
early = EarlyStopping(monitor='val_acc', min_delta=0, patience=40, verbose=1, mode='auto')
model_final.fit_generator(generator= train_images, steps_per_epoch= 2, epochs= 100, validation_data= val_images, validation_steps=1, callbacks=[checkpoint,early])
model_final.save_weights("vgg16_1.h5")

Error details:

ValueError                                Traceback (most recent call last)
<ipython-input-56-5210d7f2da32> in <module>()
      2 checkpoint = ModelCheckpoint("vgg16_1.h5", monitor='val_acc', verbose=1, save_best_only=True, save_weights_only=False, mode='auto', period=1)
      3 early = EarlyStopping(monitor='val_acc', min_delta=0, patience=40, verbose=1, mode='auto')
----> 4 model_final.fit_generator(generator= train_images, steps_per_epoch= 2, epochs= 100, validation_data= val_images, validation_steps=1, callbacks=[checkpoint,early])
      5 model_final.save_weights("vgg16_1.h5")

10 frames
/usr/local/lib/python3.7/dist-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
    984           except Exception as e:  # pylint:disable=broad-except
    985             if hasattr(e, "ag_error_metadata"):
--> 986               raise e.ag_error_metadata.to_exception(e)
    987             else:
    988               raise

ValueError: in user code:

    /usr/local/lib/python3.7/dist-packages/keras/engine/training.py:830 train_function  *
        return step_function(self, iterator)
    /usr/local/lib/python3.7/dist-packages/keras/engine/training.py:813 run_step  *
        outputs = model.train_step(data)
    /usr/local/lib/python3.7/dist-packages/keras/engine/training.py:771 train_step  *
        loss = self.compiled_loss(
    /usr/local/lib/python3.7/dist-packages/keras/engine/compile_utils.py:201 __call__  *
        loss_value = loss_obj(y_t, y_p, sample_weight=sw)
    /usr/local/lib/python3.7/dist-packages/keras/losses.py:142 __call__  *
        losses = call_fn(y_true, y_pred)
    /usr/local/lib/python3.7/dist-packages/keras/losses.py:246 call  *
        return ag_fn(y_true, y_pred, **self._fn_kwargs)
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/util/dispatch.py:206 wrapper  **
        return target(*args, **kwargs)
    /usr/local/lib/python3.7/dist-packages/keras/losses.py:1631 categorical_crossentropy
        y_true, y_pred, from_logits=from_logits)
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/util/dispatch.py:206 wrapper
        return target(*args, **kwargs)
    /usr/local/lib/python3.7/dist-packages/keras/backend.py:4827 categorical_crossentropy
        target.shape.assert_is_compatible_with(output.shape)
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/framework/tensor_shape.py:1161 assert_is_compatible_with
        raise ValueError("Shapes %s and %s are incompatible" % (self, other))

    ValueError: Shapes (None, None) and (None, 28, 28, 12) are incompatible

Your code has a number of small mistakes:

  • You pass the literal string "path" to the generators instead of a variable that holds the actual path.
  • The training, validation, and test paths should also point to different directories.
  • You did not specify an input_tensor for the VGG19 model.

Your code should look like this:

# train_dir_path is the path to your training images
train_data = train_generator.flow_from_directory(directory=train_dir_path, target_size=IMAGE_SHAPE, color_mode="rgb", class_mode='categorical', batch_size=BATCH_SIZE, shuffle=True)
# valid_dir_path is the path to your validation images
valid_data = validation_generator.flow_from_directory(directory=valid_dir_path, target_size=IMAGE_SHAPE, color_mode="rgb", class_mode='categorical', batch_size=BATCH_SIZE, shuffle=True)

  • The output of the VGG model should be flattened before it is passed to the Dense layer (see the sketch below).
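
For reference, here is a minimal sketch of why the error message shows (None, 28, 28, 12). It builds the architecture with weights=None, which is enough for a pure shape check: layers[-12] of VGG19 is an intermediate convolution layer whose output is a 4D feature map, and a Dense layer applied to it only transforms the last axis, so the result can never match one-hot labels of shape (None, 12) until the feature map is flattened:

import tensorflow as tf
from tensorflow.keras.applications.vgg19 import VGG19

# weights=None only builds the architecture; no download needed to inspect shapes
vggmodel = VGG19(weights=None, include_top=True,
                 input_tensor=tf.keras.layers.Input(shape=(224, 224, 3)))

X = vggmodel.layers[-12].output
print(X.shape)  # (None, 28, 28, 512): a 4D feature map, not a vector

# Dense acts only on the last axis, so the spatial axes survive
bad = tf.keras.layers.Dense(12, activation='softmax')(X)
print(bad.shape)  # (None, 28, 28, 12): exactly the shape in the error

# Flatten first, and the head emits one probability vector per image
good = tf.keras.layers.Dense(12, activation='softmax')(tf.keras.layers.Flatten()(X))
print(good.shape)  # (None, 12): compatible with the one-hot labels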

Full code:

from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Flatten, Dense
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.applications.vgg19 import VGG19
from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping

IMAGE_SHAPE = (224, 224)
BATCH_SIZE = 32

train_generator = ImageDataGenerator(rescale=1./255, fill_mode='nearest')
# train_dir_path is the path to your training images
train_data = train_generator.flow_from_directory(directory=train_dir_path, target_size=IMAGE_SHAPE, color_mode="rgb", class_mode='categorical', batch_size=BATCH_SIZE, shuffle=True)

validation_generator = ImageDataGenerator(rescale=1./255)
# valid_dir_path is the path to your validation images
valid_data = validation_generator.flow_from_directory(directory=valid_dir_path, target_size=IMAGE_SHAPE, color_mode="rgb", class_mode='categorical', batch_size=BATCH_SIZE, shuffle=True)

test_generator = ImageDataGenerator(rescale=1./255)
# test_dir_path is the path to your test images
test_data = test_generator.flow_from_directory(directory=test_dir_path, target_size=IMAGE_SHAPE, color_mode="rgb", class_mode='categorical', batch_size=1, shuffle=False)
test_data.reset()

# Give the model an explicit input tensor so the graph has a fixed input shape
vggmodel = VGG19(weights='imagenet', include_top=True, input_tensor=Input(shape=(224, 224, 3)))

# Freeze the pretrained layers so only the new head is trained
for layer in vggmodel.layers[:32]:
    print(layer)
    layer.trainable = False

X = vggmodel.layers[-12].output
X = Flatten()(X)  # flatten the 4D feature map before the classifier
predictions = Dense(12, activation="softmax")(X)
model_final = Model(vggmodel.input, predictions)

model_final.compile(optimizer=Adam(learning_rate=0.0003), loss='categorical_crossentropy', metrics=["accuracy"])

# Sanity check: one batch should be (32, 224, 224, 3) images and (32, 12) labels
for image_batch, labels_batch in train_data:
    print(image_batch.shape)
    print(labels_batch.shape)
    break

# With metrics=["accuracy"], the validation metric is named 'val_accuracy' in TF 2.x
checkpoint = ModelCheckpoint("vgg16_1.h5", monitor='val_accuracy', verbose=1, save_best_only=True, save_weights_only=False, mode='auto')
early = EarlyStopping(monitor='val_accuracy', patience=40, verbose=1, mode='auto')
model_final.fit(train_data, steps_per_epoch=2, epochs=100, validation_data=valid_data, validation_steps=1, callbacks=[checkpoint, early])
model_final.save_weights("vgg16_1.h5")
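
As a quick sanity check (a small addition against the model and generators defined above), you can confirm before training that the predictions and labels now agree:

# The corrected head emits one probability vector per image
print(model_final.output_shape)  # (None, 12)

# One training batch pairs (32, 224, 224, 3) images with (32, 12) one-hot labels
image_batch, labels_batch = next(iter(train_data))
print(image_batch.shape, labels_batch.shape)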