How to print Keras tensor values?
I want to print the state values of the LSTM layer.
class CustomCallback(keras.callbacks.Callback):
    def on_epoch_end(self, epoch, logs=None):
        encoder_outputs, state_h, state_c = self.model.layers[1].output
        print(state_h)
        print(state_c)
It is printing something like this:
32/32 - 0s - loss: 39.6719 - accuracy: 0.2420
KerasTensor(type_spec=TensorSpec(shape=(None, 5), dtype=tf.float32, name=None), name='lstm_5/PartitionedCall:3', description="created by layer 'lstm_5'")
Epoch 5/20000
32/32 - 0s - loss: 39.6549 - accuracy: 0.2420
KerasTensor(type_spec=TensorSpec(shape=(None, 5), dtype=tf.float32, name=None), name='lstm_5/PartitionedCall:3', description="created by layer 'lstm_5'")
How can I print the real values of the tensor?
You have to feed some data to the LSTM layer inside the Callback in order to see actual values:
import tensorflow as tf

class CustomCallback(tf.keras.callbacks.Callback):
    def __init__(self, data, sample_size):
        self.data = data
        self.sample_size = sample_size

    def on_epoch_end(self, epoch, logs=None):
        # Run real samples through the LSTM layer to get concrete values
        # instead of symbolic KerasTensors.
        encoder_outputs, state_h, state_c = lstm_layer(self.data[:self.sample_size])
        tf.print('state_h --> ', state_h)
        tf.print('state_c --> ', state_c)

inputs = tf.keras.layers.Input((5, 10))
x, _, _ = tf.keras.layers.LSTM(32, return_state=True, return_sequences=True)(inputs)
x = tf.keras.layers.Flatten()(x)
x = tf.keras.layers.Dense(units=1)(x)
model = tf.keras.Model(inputs, x)
model.compile(loss=tf.keras.losses.BinaryCrossentropy())

# The LSTM layer that the callback closes over.
lstm_layer = model.layers[1]
x_train = tf.random.normal((10, 5, 10))
x_test = tf.random.normal((10, 5, 10))
model.fit(x_train, tf.random.uniform((10, 1), maxval=2), epochs=2, callbacks=[CustomCallback(x_test, 1)], batch_size=2)
Epoch 1/2
5/5 [==============================] - 2s 5ms/step - loss: 12.2492
state_h --> [[-0.157256633 -0.0619691685 0.102620631 ... 0.0852451548 -0.0657120794 -0.201934695]]
state_c --> [[-0.316935241 -0.157902092 0.184583426 ... 0.196862131 -0.134880155 -0.467693359]]
Epoch 2/2
5/5 [==============================] - 0s 4ms/step - loss: 11.8095
state_h --> [[-0.15817374 -0.0611076616 0.103141323 ... 0.0845508352 -0.0648964494 -0.201082334]]
state_c --> [[-0.319411457 -0.156104326 0.186640084 ... 0.194445729 -0.13365829 -0.464410305]]
<keras.callbacks.History at 0x7f9baadc8b90>
Note that I created an x_test tensor, but you could also just feed x_train to your callback. lstm_layer holds the current weights as training progresses. You can verify this by printing the layer weights inside the Callback: tf.print(lstm_layer.get_weights()).
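If you want to double-check that lstm_layer really reflects the training progress, a minimal sketch of a callback that prints both the states and the layer weights could look like the following. It assumes the model, lstm_layer, and x_test defined above; the class name StateAndWeightsCallback is just illustrative:

import tensorflow as tf

class StateAndWeightsCallback(tf.keras.callbacks.Callback):
    def __init__(self, data, sample_size):
        super().__init__()
        self.data = data
        self.sample_size = sample_size

    def on_epoch_end(self, epoch, logs=None):
        # Feed a few samples through the LSTM layer to get concrete state values.
        _, state_h, state_c = lstm_layer(self.data[:self.sample_size])
        tf.print('state_h -->', state_h)
        tf.print('state_c -->', state_c)
        # The kernel, recurrent kernel, and bias change from epoch to epoch,
        # which shows the callback is seeing the current training state.
        tf.print('lstm weights -->', lstm_layer.get_weights())

In the fit call above you would simply pass StateAndWeightsCallback(x_test, 1) instead of CustomCallback(x_test, 1).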