使用 Lambda 创建自定义 Keras Layer 对象
Create custom Keras Layer object using Lambda
我想构建一个自定义 Keras 层来保持 k 个最高激活值。我目前正在这样做(并且工作正常):
def max_topk_pool(x,k):
    """Return the k largest activations of `x` along its last axis.

    NOTE(review): tf.nn.top_k returns a (values, indices) namedtuple;
    returning it whole makes the Lambda output the pair, not just the
    values — presumably only `.values` is wanted. TODO confirm.
    """
    import tensorflow as tf  # local import so the function is self-contained
    k_max = tf.nn.top_k(x,k=k,sorted=True,name=None)
    return k_max
def KMax(k):
    """Build a Keras Lambda layer that keeps the k highest activations.

    NOTE(review): the declared output_shape (None, k) assumes a 2-D
    (batch, features) input — TODO confirm against the actual model.
    """
    return Lambda(max_topk_pool,
                  arguments={'k':k},
                  output_shape=lambda x: (None, k))
你知道有没有办法按照 Keras 在 https://keras.io/layers/writing-your-own-keras-layers/
中显示的方式构建自定义层 class "KMax"
from keras import backend as K
from keras.layers import Layer
class MyLayer(Layer):
    """Example custom layer from the Keras docs: a trainable linear projection."""

    def __init__(self, output_dim, **kwargs):
        self.output_dim = output_dim  # size of the last output axis
        super(MyLayer, self).__init__(**kwargs)

    def build(self, input_shape):
        # Create a trainable weight variable for this layer.
        self.kernel = self.add_weight(name='kernel',
                                      shape=(input_shape[1], self.output_dim),
                                      initializer='uniform',
                                      trainable=True)
        super(MyLayer, self).build(input_shape)  # Be sure to call this at the end

    def call(self, x):
        # Linear map: (batch, input_dim) @ (input_dim, output_dim).
        return K.dot(x, self.kernel)

    def compute_output_shape(self, input_shape):
        return (input_shape[0], self.output_dim)
我想要这样的东西:
from keras import backend as K
from keras.layers import Layer
class KMax(Layer):
    """Sketch of the desired layer (incomplete pseudo-code from the question).

    NOTE(review): `self.K = K` stores the Keras backend module, not the
    pooling size — presumably `self.k = k` (an int parameter) is intended.
    NOTE(review): `super(MyLayer, ...)` should read `super(KMax, ...)`.
    """
    def __init__(self, output_dim, **kwargs):
        self.K = K
        super(MyLayer, self).__init__(**kwargs)
    def build(self, input_shape):
        <... Lambda here ?>
    def compute_output_shape(self, input_shape):
        return (input_shape[0], self.K)
非常感谢!
这是您需要的(基于 https://github.com/keras-team/keras/issues/373):
from keras.engine import Layer, InputSpec
from keras.layers import Flatten
import tensorflow as tf
# https://github.com/keras-team/keras/issues/373
class KMaxPooling(Layer):
    """K-max pooling over the time axis of a 3-D (batch, steps, features) input.

    For every feature channel, keeps the `k` largest activations seen along
    the sequence dimension and returns them flattened into a 2-D tensor of
    shape (batch, features * k). TensorFlow backend only.
    """

    def __init__(self, k=1, **kwargs):
        super().__init__(**kwargs)
        self.input_spec = InputSpec(ndim=3)
        self.k = k

    def compute_output_shape(self, input_shape):
        batch = input_shape[0]
        return batch, input_shape[2] * self.k

    def call(self, inputs):
        # Move the sequence axis last: top_k always operates on axis -1.
        transposed = tf.transpose(inputs, [0, 2, 1])
        # top_k yields (values, indices); keep only the sorted values.
        values = tf.nn.top_k(transposed, k=self.k, sorted=True, name=None)[0]
        # Collapse (batch, features, k) down to (batch, features * k).
        return Flatten()(values)

    def get_config(self):
        # Record `k` so the layer round-trips through save/load.
        return {**super().get_config(), 'k': self.k}
我想构建一个自定义 Keras 层来保持 k 个最高激活值。我目前正在这样做(并且工作正常):
def max_topk_pool(x,k):
    """Return the k largest activations of `x` along its last axis.

    NOTE(review): tf.nn.top_k returns a (values, indices) namedtuple;
    returning it whole makes the Lambda output the pair, not just the
    values — presumably only `.values` is wanted. TODO confirm.
    """
    import tensorflow as tf  # local import so the function is self-contained
    k_max = tf.nn.top_k(x,k=k,sorted=True,name=None)
    return k_max
def KMax(k):
    """Build a Keras Lambda layer that keeps the k highest activations.

    NOTE(review): the declared output_shape (None, k) assumes a 2-D
    (batch, features) input — TODO confirm against the actual model.
    """
    return Lambda(max_topk_pool,
                  arguments={'k':k},
                  output_shape=lambda x: (None, k))
你知道有没有办法按照 Keras 在 https://keras.io/layers/writing-your-own-keras-layers/
中显示的方式构建自定义层 class "KMax"
from keras import backend as K
from keras.layers import Layer
class MyLayer(Layer):
    """Example custom layer from the Keras docs: a trainable linear projection."""

    def __init__(self, output_dim, **kwargs):
        self.output_dim = output_dim  # size of the last output axis
        super(MyLayer, self).__init__(**kwargs)

    def build(self, input_shape):
        # Create a trainable weight variable for this layer.
        self.kernel = self.add_weight(name='kernel',
                                      shape=(input_shape[1], self.output_dim),
                                      initializer='uniform',
                                      trainable=True)
        super(MyLayer, self).build(input_shape)  # Be sure to call this at the end

    def call(self, x):
        # Linear map: (batch, input_dim) @ (input_dim, output_dim).
        return K.dot(x, self.kernel)

    def compute_output_shape(self, input_shape):
        return (input_shape[0], self.output_dim)
我想要这样的东西:
from keras import backend as K
from keras.layers import Layer
class KMax(Layer):
    """Sketch of the desired layer (incomplete pseudo-code from the question).

    NOTE(review): `self.K = K` stores the Keras backend module, not the
    pooling size — presumably `self.k = k` (an int parameter) is intended.
    NOTE(review): `super(MyLayer, ...)` should read `super(KMax, ...)`.
    """
    def __init__(self, output_dim, **kwargs):
        self.K = K
        super(MyLayer, self).__init__(**kwargs)
    def build(self, input_shape):
        <... Lambda here ?>
    def compute_output_shape(self, input_shape):
        return (input_shape[0], self.K)
非常感谢!
这是您需要的(基于 https://github.com/keras-team/keras/issues/373):
from keras.engine import Layer, InputSpec
from keras.layers import Flatten
import tensorflow as tf
# https://github.com/keras-team/keras/issues/373
class KMaxPooling(Layer):
    """K-max pooling over the time axis of a 3-D (batch, steps, features) input.

    For every feature channel, keeps the `k` largest activations seen along
    the sequence dimension and returns them flattened into a 2-D tensor of
    shape (batch, features * k). TensorFlow backend only.
    """

    def __init__(self, k=1, **kwargs):
        super().__init__(**kwargs)
        self.input_spec = InputSpec(ndim=3)
        self.k = k

    def compute_output_shape(self, input_shape):
        batch = input_shape[0]
        return batch, input_shape[2] * self.k

    def call(self, inputs):
        # Move the sequence axis last: top_k always operates on axis -1.
        transposed = tf.transpose(inputs, [0, 2, 1])
        # top_k yields (values, indices); keep only the sorted values.
        values = tf.nn.top_k(transposed, k=self.k, sorted=True, name=None)[0]
        # Collapse (batch, features, k) down to (batch, features * k).
        return Flatten()(values)

    def get_config(self):
        # Record `k` so the layer round-trips through save/load.
        return {**super().get_config(), 'k': self.k}