Keras 不允许构建内部具有自定义层的模型

Keras will not allow building a model that has custom layers inside

这是我的 Resnet 代码:

class Resnet50(keras.Model):
    """ResNet-50-style model as posted in the question.

    NOTE(review): this is the broken reproducer. `call` rebuilds a fresh
    functional graph on every invocation, and the `RBB(K=...)` calls below
    omit the `input_shape` argument the method requires — this is what
    leads to the reported OperatorNotAllowedInGraphError.
    """

    def __init__(self):
        super().__init__()

    def RBB(self, input_shape, K):
        # Residual bottleneck block built as a standalone sub-model:
        # three 1x1 ReLU convolutions plus an additive skip connection.
        inputs = keras.Input(input_shape)
        X = layers.Conv2D(K, kernel_size=(1, 1), activation="relu")(inputs)
        X = layers.Conv2D(K, kernel_size=(1, 1), activation="relu")(X)
        X = layers.Conv2D(K, kernel_size=(1, 1), activation="relu")(X)
        # NOTE(review): raw `+` on symbolic tensors and a bare
        # keras.activations.relu call bypass Keras layer tracking.
        skip_conn = inputs + X
        output = keras.activations.relu(skip_conn)
        model = keras.Model(inputs=inputs, outputs=output)
        return model

    def complete_network(self, input_shape, RBB=RBB):
        # NOTE(review): `RBB=RBB` captures the unbound function at class-body
        # time, and every `RBB(K=...)` below omits `self`/`input_shape`.
        inputs = keras.Input(input_shape)
        X = layers.Conv2D(64, kernel_size=(3, 3), activation="relu")(inputs)
        X = layers.Conv2D(64, kernel_size=(3, 3), activation="relu")(X)
        X = RBB(K=64)(X)
        X = RBB(K=64)(X)
        X = RBB(K=64)(X)
        X = RBB(K=128)(X)
        X = layers.MaxPooling2D(pool_size=(2, 2))(X)
        X = RBB(K=128)(X)
        X = RBB(K=128)(X)
        X = RBB(K=128)(X)
        X = RBB(K=256)(X)
        X = layers.MaxPooling2D(pool_size=(2, 2))(X)
        X = RBB(K=256)(X)
        X = RBB(K=256)(X)
        X = RBB(K=256)(X)
        X = RBB(K=256)(X)
        X = RBB(K=256)(X)
        X = RBB(K=512)(X)
        X = layers.MaxPooling2D(pool_size=(2, 2))(X)
        X = RBB(K=512)(X)
        X = RBB(K=512)(X)
        X = layers.AveragePooling2D(pool_size=(2, 2))(X)
        X = layers.Flatten()(X)
        X = layers.Dense(1056)(X)
        # NOTE(review): this Dense layer is never called on X, so `output`
        # is a Layer object, not a tensor — keras.Model cannot accept it.
        output = layers.Dense(num_classes, activation="softmax")
        model = keras.Model(inputs=inputs, outputs=output)
        return model

    def call(self, inputs):
        # NOTE(review): `inputs` here is a tensor, but complete_network
        # expects a shape tuple — another source of the failure.
        x = self.complete_network(inputs)
        return x

# Question's repro script: build() forwards to call(), which fails as shown
# in the traceback below. Also, (224, 224) has no channel axis — Conv2D
# expects e.g. (224, 224, 3).
net = Resnet50()
net.build(input_shape=(224,224))

net.summary()

这是我运行汇总操作时的错误:

OperatorNotAllowedInGraphError            Traceback (most recent call last)

/usr/local/lib/python3.7/dist-packages/keras/engine/training.py in build(self, input_shape)
    439         try:
--> 440           self.call(x, **kwargs)
    441         except (tf.errors.InvalidArgumentError, TypeError) as e:

5 frames

OperatorNotAllowedInGraphError: iterating over `tf.Tensor` is not allowed in Graph execution. Use Eager execution or decorate this function with @tf.function.


During handling of the above exception, another exception occurred:

ValueError                                Traceback (most recent call last)

/usr/local/lib/python3.7/dist-packages/keras/engine/training.py in build(self, input_shape)
    440           self.call(x, **kwargs)
    441         except (tf.errors.InvalidArgumentError, TypeError) as e:
--> 442           raise ValueError('You cannot build your model by calling `build` '
    443                            'if your layers do not support float type inputs. '
    444                            'Instead, in order to instantiate and build your '

ValueError: You cannot build your model by calling `build` if your layers do not support float type inputs. Instead, in order to instantiate and build your model, call your model on real tensor data (of the correct dtype).

The actual error from `call` is: iterating over `tf.Tensor` is not allowed in Graph execution. Use Eager execution or decorate this function with @tf.function..

知道如何修复此代码吗?

您正在一个模型中构建多个模型,在一个调用中创建一个模型,并在没有 keras 层的情况下创建张量。所有这一切都很奇怪,问题可能出在任何地方。

(嗯...可以在models里面创建models,其他的就太奇怪了)

只需使用 keras 层构建一个模型即可:

def skipFunc(inputs):
    """Add the first two entries of *inputs* element-wise.

    Intended for use inside a Keras Lambda layer, where *inputs* is the
    list of tensors passed to the layer.
    """
    left = inputs[0]
    right = inputs[1]
    return left + right

def RBB(inputTensor, K):
    """Residual bottleneck block in plain functional-API style.

    Applies three 1x1 convolutions with K filters and ReLU activations,
    adds the block input back via a Lambda skip connection, and finishes
    with a ReLU. Returns the resulting tensor.
    """
    branch = inputTensor
    for _ in range(3):
        branch = layers.Conv2D(K, kernel_size=(1, 1), activation="relu")(branch)

    merged = layers.Lambda(skipFunc)([branch, inputTensor])
    return layers.Activation("relu")(merged)


def complete_network(input_shape, RBB=RBB):
    """Assemble the full ResNet-style classifier as one functional model.

    Args:
        input_shape: shape tuple for keras.Input (should include the
            channel axis, e.g. (224, 224, 3) — TODO confirm at call site).
        RBB: residual-block builder taking (tensor, K) and returning a tensor.

    Returns:
        A compiled-ready keras.Model mapping the input image to softmax
        class probabilities over `num_classes` (a module-level global that
        must be defined before calling this function).
    """
    inputs = keras.Input(input_shape)
    X = layers.Conv2D(64, kernel_size=(3, 3), activation="relu")(inputs)
    X = layers.Conv2D(64, kernel_size=(3, 3), activation="relu")(X)
    X = RBB(X, K=64)
    X = RBB(X, K=64)
    X = RBB(X, K=64)
    X = RBB(X, K=128)
    X = layers.MaxPooling2D(pool_size=(2, 2))(X)
    X = RBB(X, K=128)
    X = RBB(X, K=128)
    X = RBB(X, K=128)
    X = RBB(X, K=256)
    X = layers.MaxPooling2D(pool_size=(2, 2))(X)
    X = RBB(X, K=256)
    X = RBB(X, K=256)
    X = RBB(X, K=256)
    X = RBB(X, K=256)
    X = RBB(X, K=256)
    X = RBB(X, K=512)
    X = layers.MaxPooling2D(pool_size=(2, 2))(X)
    X = RBB(X, K=512)
    X = RBB(X, K=512)
    X = layers.AveragePooling2D(pool_size=(2, 2))(X)
    # BUG FIX: the original assigned the Flatten/Dense *layer objects* to X
    # instead of calling them on X, and never applied the final Dense layer,
    # so keras.Model received a Layer (not a tensor) as `outputs` and the
    # classifier head was disconnected from the graph.
    X = layers.Flatten()(X)
    X = layers.Dense(1056)(X)
    output = layers.Dense(num_classes, activation="softmax")(X)
    model = keras.Model(inputs=inputs, outputs=output)
    return model

# Build the answer's model and print its layer summary.
# NOTE(review): Conv2D needs a channel axis, so input_shape should
# probably be (224, 224, 3) — (224, 224) alone will fail shape
# inference; confirm against the intended data.
net = complete_network(input_shape=(224,224))
net.summary()