Change Keras regularizer during training / dynamic regularization
I am looking for a proper way to dynamically regularize a layer's weights during training. As an example, after 10 calls I would like to replace the L2 regularization with L1 regularization, but only for specific weights of MyLayer. Here is an example layer implementation:
class MyLayer(tf.keras.layers.Layer):
    def __init__(...):
        ...  # some code

    def build(self, input_shape):
        self.regularizer = tf.keras.regularizers.L2()  # this regularization should be changed after some steps
        self.my_weights = self.add_weight(name='myweights', shape=(self.input_dim,),
                                          initializer=tf.keras.initializers.Constant(1.),
                                          regularizer=self.regularizer, trainable=True)
        self.counter = tf.Variable(0, dtype=tf.int32)
        ...

    def call(self, inputs):
        # ... do some processing ...
        # for the following code I am looking for a proper implementation
        if self.counter > 10:
            self.regularizer = tf.keras.regularizers.L1()
        tf.keras.backend.update(self.counter, self.counter + 1)
Rather than framing your problem as switching from L2 to L1, we can frame it as changing the parameters of a single L1L2 regularizer; by updating its l1/l2 factors we can, in effect, change the regularizer.

Note that hyperparameters cannot be changed after the model is compiled (unless you write a custom training loop), because at compile time the hyperparameters are baked into the training function.

The way to modify a hyperparameter during training is to store it in a backend variable that lives inside the training function, and to update that variable as training proceeds.

So we can define the following custom regularizer:
from tensorflow.keras import backend as K
from tensorflow.keras.regularizers import Regularizer

class L1L2_m(Regularizer):
    """Regularizer for L1 and L2 regularization.

    # Arguments
        l1: Float; L1 regularization factor.
        l2: Float; L2 regularization factor.
    """
    def __init__(self, l1=0.0, l2=0.01):
        with K.name_scope(self.__class__.__name__):
            # backend variables, so the factors can still be updated after compile
            self.l1 = K.variable(l1, name='l1')
            self.l2 = K.variable(l2, name='l2')
        self.val_l1 = l1
        self.val_l2 = l2

    def set_l1_l2(self, l1, l2):
        K.set_value(self.l1, l1)
        K.set_value(self.l2, l2)
        self.val_l1 = l1
        self.val_l2 = l2

    def __call__(self, x):
        regularization = 0.
        if self.val_l1 > 0.:
            regularization += K.sum(self.l1 * K.abs(x))
        if self.val_l2 > 0.:
            regularization += K.sum(self.l2 * K.square(x))
        return regularization

    def get_config(self):
        return {'l1': float(K.get_value(self.l1)),
                'l2': float(K.get_value(self.l2))}
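As a quick sanity check (a minimal sketch, assuming TF 2.x with eager execution), updating the backend variables changes the penalty returned by the same regularizer instance:

    import tensorflow as tf

    reg = L1L2_m()                  # defaults: l1=0.0, l2=0.01
    w = tf.constant([1., -2., 3.])

    print(float(reg(w)))            # L2 penalty: 0.01 * (1 + 4 + 9) = 0.14
    reg.set_l1_l2(0.01, 0.)         # switch to a pure L1 penalty
    print(float(reg(w)))            # L1 penalty: 0.01 * (1 + 2 + 3) = 0.06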
Register your custom object so that reloading the model will work without problems when you want to export it:
from tensorflow.keras.utils import get_custom_objects

get_custom_objects().update({L1L2_m.__name__: L1L2_m})
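With the class registered, a save/load round trip should work. Here is a minimal sketch (the layer shapes and the file name are made up for illustration):

    import tensorflow as tf

    model = tf.keras.Sequential([
        tf.keras.layers.Dense(4, input_shape=(8,), kernel_regularizer=L1L2_m()),
    ])
    model.save('l1l2_m_demo.h5')
    restored = tf.keras.models.load_model('l1l2_m_demo.h5')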
Then use the custom object's set_l1_l2 method to update the variables:
class MyLayer(tf.keras.layers.Layer):
    def __init__(...):
        ...  # some code

    def build(self, input_shape):
        self.regularizer = L1L2_m()  # the factors of this regularizer can now be changed after some steps
        self.my_weights = self.add_weight(name='myweights', shape=(self.input_dim,),
                                          initializer=tf.keras.initializers.Constant(1.),
                                          regularizer=self.regularizer, trainable=True)
        self.counter = tf.Variable(0, dtype=tf.int32)
        ...

    def call(self, inputs):
        # ... do some processing ...
        # switch from the default L2 penalty to L1 after 10 calls
        if self.counter == 10:
            self.regularizer.set_l1_l2(0.01, 0.)
        tf.keras.backend.update(self.counter, self.counter + 1)
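Alternatively, instead of counting calls inside the layer, the same set_l1_l2 update can be driven from the training loop with a callback. This is a sketch under the assumption that the layer exposes its L1L2_m instance as my_layer.regularizer (as in the code above); the class name and after_batches argument are made up:

    import tensorflow as tf

    class SwitchToL1(tf.keras.callbacks.Callback):
        """Flips a shared L1L2_m regularizer from L2 to L1 after a fixed number of batches."""
        def __init__(self, regularizer, after_batches=10):
            super().__init__()
            self.regularizer = regularizer
            self.after_batches = after_batches
            self.seen = 0

        def on_train_batch_end(self, batch, logs=None):
            self.seen += 1
            if self.seen == self.after_batches:
                self.regularizer.set_l1_l2(0.01, 0.)

    # usage (hypothetical): model.fit(x, y, callbacks=[SwitchToL1(my_layer.regularizer)])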