How can I add trainable weights while concatenating layers

I am trying to concatenate two layers in such a way that trainable weights are assigned to the layers during concatenation. The idea behind this is that my model can determine which layer should be given a higher weight when merging them.

I have read this code:

import tensorflow as tf
from tensorflow.keras.layers import Layer, Concatenate

class WeightedAverage(Layer):

    def __init__(self, n_output):
        super(WeightedAverage, self).__init__()
        # one trainable weight per input tensor: (1, 1, n_inputs)
        self.W = tf.Variable(
            initial_value=tf.random.uniform(shape=[1, 1, n_output], minval=0, maxval=1),
            trainable=True)

    def call(self, inputs):
        # inputs is a list of tensors of shape [(n_batch, n_feat), ..., (n_batch, n_feat)]
        # expand the last dim of each input: [(n_batch, n_feat, 1), ..., (n_batch, n_feat, 1)]
        inputs = [tf.expand_dims(i, -1) for i in inputs]
        inputs = Concatenate(axis=-1)(inputs)     # (n_batch, n_feat, n_inputs)
        weights = tf.nn.softmax(self.W, axis=-1)  # (1, 1, n_inputs); sums to one on the last dim
        return tf.reduce_sum(weights * inputs, axis=-1)  # (n_batch, n_feat)
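For context, this is how the layer would be wired into a model (a minimal sketch; the input shapes and Dense sizes are my assumptions, and n_output is the number of tensors being merged):

import tensorflow as tf

inp1 = tf.keras.Input(shape=(32,))
inp2 = tf.keras.Input(shape=(32,))
x1 = tf.keras.layers.Dense(8)(inp1)
x2 = tf.keras.layers.Dense(8)(inp2)
merged = WeightedAverage(n_output=2)([x1, x2])  # (n_batch, 8)
out = tf.keras.layers.Dense(1)(merged)
model = tf.keras.Model([inp1, inp2], out)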

But this performs a weighted average of the layers. Please help. Let me know if you need more information. Thanks.

I used a weighted sum (not an average) to achieve a similar effect:

import tensorflow as tf
from tensorflow.keras import layers

class WeightedSum(layers.Layer):
    """A custom Keras layer to learn a weighted sum of tensors."""

    def __init__(self, **kwargs):
        super(WeightedSum, self).__init__(**kwargs)

    def build(self, input_shape):
        # a single scalar weight, constrained to [0, 1]
        self.a = self.add_weight(name='alpha',
                                 shape=(1,),
                                 initializer=tf.keras.initializers.Constant(0.5),
                                 dtype='float32',
                                 trainable=True,
                                 constraint=tf.keras.constraints.min_max_norm(
                                     max_value=1, min_value=0))
        super(WeightedSum, self).build(input_shape)

    def call(self, model_outputs):
        # convex combination of the two inputs
        return self.a * model_outputs[0] + (1 - self.a) * model_outputs[1]

    def compute_output_shape(self, input_shape):
        return input_shape[0]
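As a quick sanity check, the layer can be dropped into a functional model like this (the shapes here are assumptions); model.summary() should report exactly one trainable weight for the WeightedSum layer:

import tensorflow as tf

inp1 = tf.keras.Input(shape=(16,))
inp2 = tf.keras.Input(shape=(16,))
merged = WeightedSum()([inp1, inp2])  # a * inp1 + (1 - a) * inp2
model = tf.keras.Model([inp1, inp2], merged)
model.summary()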

You can make a functional model:

import tensorflow as tf

w = tf.Variable(1.0, trainable=True)  # scalar weight (a float, so it has a gradient)
input1 = tf.keras.Input(shape=(32,))
input2 = tf.keras.Input(shape=(32,))

x1 = tf.keras.layers.Dense(8)(input1)
x2 = tf.keras.layers.Dense(8)(input2)

# merge the two layers (x1, x2), weighting one of them
concatted = tf.keras.layers.Concatenate()([w * x1, x2])
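Note that in recent TensorFlow versions, multiplying a symbolic Keras tensor by a bare tf.Variable defined outside any layer may not be tracked as a trainable weight of the model. A safer variant, sketched here as an assumption rather than as part of the original answer, wraps the scalar in a small custom layer:

import tensorflow as tf

class ScaleLayer(tf.keras.layers.Layer):
    """Multiplies its input by a single trainable scalar."""
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.w = self.add_weight(name='w', shape=(),
                                 initializer='ones', trainable=True)

    def call(self, x):
        return self.w * x

input1 = tf.keras.Input(shape=(32,))
input2 = tf.keras.Input(shape=(32,))
x1 = tf.keras.layers.Dense(8)(input1)
x2 = tf.keras.layers.Dense(8)(input2)
# weight x1 with the trainable scalar, then concatenate
concatted = tf.keras.layers.Concatenate()([ScaleLayer()(x1), x2])
model = tf.keras.Model([input1, input2], concatted)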