Facing ValueError: Shapes (None, None) and (None, 256, 256, 12) are incompatible

I am working on transfer learning for multi-class classification of an image dataset with 12 classes, so I am using VGG19. However, I am getting this error: ValueError: Shapes (None, None) and (None, 256, 256, 12) are incompatible. I also have a Flatten layer.

My code:

import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.callbacks import ReduceLROnPlateau
#Learning Rate Annealer
lrr= ReduceLROnPlateau(monitor='val_acc', factor=.01, patience=3, min_lr=1e-5)
from tensorflow.keras.applications import VGG19 #For Transfer Learning
#Defining the VGG Convolutional Neural Net
base_model = VGG19(include_top = False, weights = 'imagenet')
from tensorflow.keras.layers import Flatten,Dense,BatchNormalization,Activation,Dropout

#Adding the final layers to the above base models where the actual classification is done in the dense layers
model= Sequential()
model.add(base_model) 
model.add(Flatten()) 

# Create a `Sequential` model and add a Dense layer as the first layer.
model = tf.keras.models.Sequential()
model.add(tf.keras.Input(shape=(256,256,3)))
model.add(tf.keras.layers.Dense(32, activation='relu'))
# Because the input is 4-D, the Dense layer acts on the last axis only,
# so the model now outputs arrays of shape (None, 256, 256, 32).
# Note that after the first layer, you don't need to specify
# the size of the input anymore:
model.add(tf.keras.layers.Dense(32))
model.output_shape

#Adding the Dense layers along with activation and batch normalization
model.add(Dense(1024,activation=('relu'),input_dim=256))
model.add(Dense(512,activation=('relu'))) 
model.add(Dense(128,activation=('relu'))) 
model.add(Dropout(.3))
#model.add(Dropout(.2))
model.add(Dense(12,activation=('softmax'))) 

#Checking the final model summary
model.summary()

from tensorflow.keras import optimizers

model.compile(optimizer = optimizers.Adam(learning_rate=0.5), loss='categorical_crossentropy', metrics=["accuracy"])

from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping
checkpoint = ModelCheckpoint("vgg16_1.h5", monitor='val_acc', verbose=1, save_best_only=True, save_weights_only=False, period=1)
history = model.fit(
    train_data,
    validation_data=valid_data,
    batch_size = 32,
    epochs=10,
    callbacks=[
        tf.keras.callbacks.EarlyStopping(
            monitor='val_loss',
            patience=2,
            restore_best_weights=True
        )
    ]
)
model.save_weights("vgg16_1.h5")

Detailed error:

ValueError                                Traceback (most recent call last)
<ipython-input-73-c4ac91bd242e> in <module>()
     10             monitor='val_loss',
     11             patience=2,
---> 12             restore_best_weights=True
     13         )
     14     ]

9 frames
/usr/local/lib/python3.7/dist-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
    984           except Exception as e:  # pylint:disable=broad-except
    985             if hasattr(e, "ag_error_metadata"):
--> 986               raise e.ag_error_metadata.to_exception(e)
    987             else:
    988               raise

ValueError: in user code:

    /usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:855 train_function  *
        return step_function(self, iterator)
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:845 step_function  **
        outputs = model.distribute_strategy.run(run_step, args=(data,))
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/distribute_lib.py:1285 run
        return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/distribute_lib.py:2833 call_for_each_replica
        return self._call_for_each_replica(fn, args, kwargs)
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/distribute_lib.py:3608 _call_for_each_replica
        return fn(*args, **kwargs)
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:838 run_step  **
        outputs = model.train_step(data)
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:797 train_step
        y, y_pred, sample_weight, regularization_losses=self.losses)
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/compile_utils.py:204 __call__
        loss_value = loss_obj(y_t, y_p, sample_weight=sw)
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/losses.py:155 __call__
        losses = call_fn(y_true, y_pred)
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/losses.py:259 call  **
        return ag_fn(y_true, y_pred, **self._fn_kwargs)
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/util/dispatch.py:206 wrapper
        return target(*args, **kwargs)
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/losses.py:1644 categorical_crossentropy
        y_true, y_pred, from_logits=from_logits)
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/util/dispatch.py:206 wrapper
        return target(*args, **kwargs)
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/backend.py:4862 categorical_crossentropy
        target.shape.assert_is_compatible_with(output.shape)
    /usr/local/lib/python3.7/dist-packages/tensorflow/python/framework/tensor_shape.py:1161 assert_is_compatible_with
        raise ValueError("Shapes %s and %s are incompatible" % (self, other))

    ValueError: Shapes (None, None) and (None, 256, 256, 12) are incompatible

@Frightera pointed out in the comments that you have defined Sequential twice. The second model = tf.keras.models.Sequential() discards the VGG19 base and the Flatten layer, so the Dense layers are applied directly to the (256, 256, 3) input and the final softmax outputs a tensor of shape (None, 256, 256, 12) instead of (None, 12), which is exactly the shape mismatch reported in the error.
I should also add that you do not need to complicate the model from the start; try running a simple one first, since VGG19 already does most of the work for you.
Adding many Dense layers after VGG19 does not necessarily give you a better score, because the number of layers is a hyperparameter to tune.
Also try fixing the learning rate to 0.1, 0.05, or 0.01 at the beginning.
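
Here is a minimal sketch (not from the original post, purely to illustrate the diagnosis) of how Dense layers applied to an unflattened (256, 256, 3) input end up with that output shape:

import tensorflow as tf

demo = tf.keras.Sequential([
    tf.keras.Input(shape=(256, 256, 3)),
    tf.keras.layers.Dense(32, activation='relu'),
    tf.keras.layers.Dense(12, activation='softmax'),
])
print(demo.output_shape)  # (None, 256, 256, 12) -- cannot be matched against (batch, 12) labels

The rewrite below keeps a single model definition and flattens the VGG19 features before the classifier head: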

from tensorflow import keras
from tensorflow.keras.callbacks import ReduceLROnPlateau
from tensorflow.keras.layers import Flatten,Dense,BatchNormalization,Activation,Dropout
from tensorflow.keras import optimizers

lrr = ReduceLROnPlateau(monitor='val_accuracy', factor=.01, patience=3, min_lr=1e-5)
from tensorflow.keras.applications import VGG19 #For Transfer Learning

base_model = VGG19(weights='imagenet', input_shape=(256, 256, 3), include_top=False)
inputs = keras.Input(shape=(256, 256, 3))
x = base_model(inputs, training=False)
x = Flatten()(x)
x = Dense(32, activation='relu')(x)
outputs = Dense(12,activation='softmax')(x)
model = keras.Model(inputs, outputs)

model.summary()

model.compile(optimizer = optimizers.Adam(learning_rate=0.05), loss='categorical_crossentropy', metrics=["accuracy"])
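
With a single model definition the output shape is (None, 12), which matches one-hot encoded labels. A minimal training sketch, assuming train_data and valid_data from the question yield batches of (image, one-hot label) pairs for the 12 classes:

from tensorflow.keras.callbacks import EarlyStopping

history = model.fit(
    train_data,
    validation_data=valid_data,
    epochs=10,
    callbacks=[
        lrr,  # the ReduceLROnPlateau defined above
        EarlyStopping(monitor='val_loss', patience=2, restore_best_weights=True),
    ],
)

If your labels are integer class indices rather than one-hot vectors, compile with loss='sparse_categorical_crossentropy' instead.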