TensorFlow 2.0 dataset.__iter__() 仅在启用即时执行时才受支持
TensorFlow 2.0 dataset.__iter__() is only supported when eager execution is enabled
我在 TensorFlow 2 中使用以下自定义训练代码:
def parse_function(filename, filename2):
    """Map two image filenames to an (image, image, label) training triple.

    With probability 0.5 emits a positive pair (image, preprocessed copy
    of the same image, label 1); otherwise a negative pair (image, the
    second image, label 0).

    Bug fix: the original body read undefined names `fn`/`fn2` — they must
    be the parameters `filename`/`filename2` (NameError when traced).
    """
    image = read_image(filename)  # was `fn`

    def negative_pair():
        # Two different images -> label 0.
        return image, read_image(filename2), 0  # was `fn2`

    def positive_pair():
        # Same image plus an augmented copy -> label 1.
        return image, preprocess(image), 1

    # Coin flip: draw one uniform sample and branch on < 0.5.
    return tf.case(
        {tf.less(tf.random.uniform([1])[0], tf.constant(0.5)): positive_pair},
        default=negative_pair,
    )
# Input pipeline: shuffle the filename pairs, decode them in parallel,
# emit one pair per batch, and keep a few batches prefetched ahead of
# the training loop.
dataset = (
    tf.data.Dataset.from_tensor_slices((train, shuffled_train))
    .shuffle(len(train))
    .map(parse_function, num_parallel_calls=4)
    .batch(1)
    .prefetch(buffer_size=4)
)
@tf.function
def train(model, dataset, optimizer):
    """Run one pass over `dataset`, taking one gradient step per batch.

    Bug fix: the original wrote `for x1, x2, y in enumerate(dataset)`,
    but `enumerate` yields (index, element) pairs, so the three-name
    unpacking is wrong. Iterate the dataset elements directly.

    NOTE(review): assumes each dataset element is an (x1, x2, y) triple
    as produced by parse_function — confirm against the pipeline.
    """
    for x1, x2, y in dataset:
        with tf.GradientTape() as tape:
            # Siamese forward pass: one embedding per branch.
            left, right = model([x1, x2])
            loss = contrastive_loss(left, right, tf.cast(y, tf.float32))
        gradients = tape.gradient(loss, model.trainable_variables)
        optimizer.apply_gradients(zip(gradients, model.trainable_variables))
# NOTE(review): compile() is redundant here — the custom loop never uses the
# compiled optimizer, and a second, separate RMSprop instance is created for
# train(). Presumably a single shared optimizer is intended — confirm.
siamese_net.compile(optimizer=tf.keras.optimizers.RMSprop(learning_rate=1e-3))
train(siamese_net, dataset, tf.keras.optimizers.RMSprop(learning_rate=1e-3))
此代码给出错误:
dataset.__iter__() is only supported when eager execution is enabled.
但是,它在 TensorFlow 2.0 中默认启用。
tf.executing_eagerly()
也返回 'True'。
我通过将 train 函数更改为以下内容来解决此问题:
def train(model, dataset, optimizer):
    """Train `model` for one pass over `dataset` with `optimizer`.

    Runs eagerly (no @tf.function decorator), so the dataset is iterated
    with a plain Python `for` loop; `enumerate` supplies the step index.
    """
    for _step, (anchor, other, label) in enumerate(dataset):
        with tf.GradientTape() as tape:
            emb_left, emb_right = model([anchor, other])
            loss = contrastive_loss(emb_left, emb_right, tf.cast(label, tf.float32))
        grads = tape.gradient(loss, model.trainable_variables)
        optimizer.apply_gradients(zip(grads, model.trainable_variables))
两个更改是:删除 @tf.function 装饰器,以及修正 enumerate 的用法(enumerate 产生的是 (索引, 元素) 对,不能直接解包为三个变量)。
我通过在导入 tensorflow 后启用 eager execution 来修复它:
import tensorflow as tf
# NOTE(review): tf.enable_eager_execution() is a TensorFlow 1.x API; it was
# removed from the top-level namespace in TF 2.x, where eager execution is
# enabled by default (tf.compat.v1.enable_eager_execution() is the 2.x
# compatibility path) — confirm which TF version this snippet targets.
tf.enable_eager_execution()
参考:Tensorflow
如果您使用 Jupyter notebook,在运行以下代码之后:
import tensorflow as tf
# NOTE(review): TF 1.x-only call; absent from the TF 2.x top-level API,
# where eager mode is already the default — confirm the targeted version.
tf.enable_eager_execution()
你需要重启内核才能正常工作
我在 TensorFlow 2 中使用以下自定义训练代码:
def parse_function(filename, filename2):
    """Map two image filenames to an (image, image, label) training triple.

    With probability 0.5 emits a positive pair (image, preprocessed copy
    of the same image, label 1); otherwise a negative pair (image, the
    second image, label 0).

    Bug fix: the original body read undefined names `fn`/`fn2` — they must
    be the parameters `filename`/`filename2` (NameError when traced).
    """
    image = read_image(filename)  # was `fn`

    def negative_pair():
        # Two different images -> label 0.
        return image, read_image(filename2), 0  # was `fn2`

    def positive_pair():
        # Same image plus an augmented copy -> label 1.
        return image, preprocess(image), 1

    # Coin flip: draw one uniform sample and branch on < 0.5.
    return tf.case(
        {tf.less(tf.random.uniform([1])[0], tf.constant(0.5)): positive_pair},
        default=negative_pair,
    )
# Input pipeline: shuffle the filename pairs, decode them in parallel,
# emit one pair per batch, and keep a few batches prefetched ahead of
# the training loop.
dataset = (
    tf.data.Dataset.from_tensor_slices((train, shuffled_train))
    .shuffle(len(train))
    .map(parse_function, num_parallel_calls=4)
    .batch(1)
    .prefetch(buffer_size=4)
)
@tf.function
def train(model, dataset, optimizer):
    """Run one pass over `dataset`, taking one gradient step per batch.

    Bug fix: the original wrote `for x1, x2, y in enumerate(dataset)`,
    but `enumerate` yields (index, element) pairs, so the three-name
    unpacking is wrong. Iterate the dataset elements directly.

    NOTE(review): assumes each dataset element is an (x1, x2, y) triple
    as produced by parse_function — confirm against the pipeline.
    """
    for x1, x2, y in dataset:
        with tf.GradientTape() as tape:
            # Siamese forward pass: one embedding per branch.
            left, right = model([x1, x2])
            loss = contrastive_loss(left, right, tf.cast(y, tf.float32))
        gradients = tape.gradient(loss, model.trainable_variables)
        optimizer.apply_gradients(zip(gradients, model.trainable_variables))
# NOTE(review): compile() is redundant here — the custom loop never uses the
# compiled optimizer, and a second, separate RMSprop instance is created for
# train(). Presumably a single shared optimizer is intended — confirm.
siamese_net.compile(optimizer=tf.keras.optimizers.RMSprop(learning_rate=1e-3))
train(siamese_net, dataset, tf.keras.optimizers.RMSprop(learning_rate=1e-3))
此代码给出错误:
dataset.__iter__() is only supported when eager execution is enabled.
但是,它在 TensorFlow 2.0 中默认启用。
tf.executing_eagerly()
也返回 'True'。
我通过将 train 函数更改为以下内容来解决此问题:
def train(model, dataset, optimizer):
    """One training epoch: a gradient step for every batch in `dataset`.

    Executes eagerly (no @tf.function), which is what makes direct
    Python iteration over the dataset legal here.
    """
    for batch_idx, (img_a, img_b, pair_label) in enumerate(dataset):
        with tf.GradientTape() as tape:
            out_a, out_b = model([img_a, img_b])
            loss = contrastive_loss(out_a, out_b, tf.cast(pair_label, tf.float32))
        grads = tape.gradient(loss, model.trainable_variables)
        optimizer.apply_gradients(zip(grads, model.trainable_variables))
两个更改是:删除 @tf.function 装饰器,以及修正 enumerate 的用法(enumerate 产生的是 (索引, 元素) 对,不能直接解包为三个变量)。
我通过在导入 tensorflow 后启用 eager execution 来修复它:
import tensorflow as tf
# NOTE(review): TF 1.x-only call; absent from the TF 2.x top-level API,
# where eager mode is already the default — confirm the targeted version.
tf.enable_eager_execution()
参考:Tensorflow
如果您使用 Jupyter notebook,在运行以下代码之后:
import tensorflow as tf
tf.enable_eager_execution()
你需要重启内核才能正常工作