keras.callbacks with TensorBoard is not displaying epochs
I am working with TensorFlow and the Boston housing dataset. When I try to display my epochs with TensorBoard, I get no output.
I tried implementing it as shown in the documentation: https://www.tensorflow.org/tensorboard/get_started
I am using a Google Colab notebook as my environment.
%load_ext tensorboard
from __future__ import absolute_import, division, print_function, unicode_literals
import pathlib
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns
from datetime import datetime
try:
    # %tensorflow_version only exists in Colab.
    %tensorflow_version 2.x
except Exception:
    pass
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
# print(tf.__version__)
import pandas as pd
from sklearn.datasets import load_boston
boston = load_boston()
# clear log from previous runs
!rm -rf ./logs/
# Next, we load the data into a 'dataframe' object for easier manipulation, and also print the first few rows in order to examine it
data = pd.DataFrame(boston.data, columns=boston.feature_names)
data['MEDV'] = pd.Series(data=boston.target, index=data.index)
train_dataset = data.sample(frac=0.7,random_state=0)
test_dataset = data.drop(train_dataset.index)
train_stats = train_dataset.describe()
train_stats.pop("MEDV")
train_stats = train_stats.transpose()
train_stats
train_labels = train_dataset.pop('MEDV')
test_labels = test_dataset.pop('MEDV')
def norm(x):
    return (x - train_stats['mean']) / train_stats['std']
normed_train_data = norm(train_dataset)
normed_test_data = norm(test_dataset)
def build_model():
    model = keras.Sequential([
        layers.Dense(64, activation='relu', input_shape=[len(train_dataset.keys())]),
        layers.Dense(64, activation='relu'),
        layers.Dense(1)
    ])
    optimizer = tf.keras.optimizers.RMSprop(0.001)
    model.compile(loss='mse',
                  optimizer=optimizer,
                  metrics=['mae', 'mse'])
    return model
model = build_model()
example_batch = normed_train_data[:10]
example_result = model.predict(example_batch)
# Display training progress by printing a single dot for each completed epoch
# not using this for now
class PrintDot(keras.callbacks.Callback):
    def on_epoch_end(self, epoch, logs):
        if epoch % 100 == 0: print('')
        print('.', end='')
EPOCHS = 1000
# Define the Keras TensorBoard callback.
# logdir="logs/fit/" + datetime.now().strftime("%Y%m%d-%H%M%S")
# tensorboard_callback = keras.callbacks.TensorBoard(log_dir=logdir) # this didn't work either
tensorboard_callback = tf.keras.callbacks.TensorBoard(
    log_dir='logs', histogram_freq=0, write_graph=True, write_images=False,
    update_freq='epoch', profile_batch=2, embeddings_freq=0,
    embeddings_metadata=None
)
history = model.fit(
    normed_train_data, train_labels,
    epochs=EPOCHS, validation_split=0.2, verbose=0,
    # callbacks=[PrintDot()])
    callbacks=[tensorboard_callback])
Output:
*nothing*
The output I want should look similar to this:
Epoch 1/5
1875/1875 [==============================] - 8s 4ms/step - loss: 0.2199 - accuracy: 0.9352 - val_loss: 0.1205 - val_accuracy: 0.9626
Epoch 2/5
1875/1875 [==============================] - 8s 4ms/step - loss: 0.0976 - accuracy: 0.9705 - val_loss: 0.0835 - val_accuracy: 0.9761
Epoch 3/5
1875/1875 [==============================] - 8s 4ms/step - loss: 0.0690 - accuracy: 0.9784 - val_loss: 0.0687 - val_accuracy: 0.9782
Epoch 4/5
1875/1875 [==============================] - 8s 4ms/step - loss: 0.0528 - accuracy: 0.9831 - val_loss: 0.0695 - val_accuracy: 0.9786
Epoch 5/5
1875/1875 [==============================] - 8s 4ms/step - loss: 0.0437 - accuracy: 0.9853 - val_loss: 0.0652 - val_accuracy: 0.9795
<tensorflow.python.keras.callbacks.History at 0x7f5c3d1ce828>
Set verbose=1 in your model.fit() and it will display the data for each epoch.
0 = silent, 1 = progress bar, 2 = one line per epoch.
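For example, here is the fit call from the question with only the verbose argument changed (a minimal sketch reusing the variable names defined above; the TensorBoard callback is kept so the logs are still written):
history = model.fit(
    normed_train_data, train_labels,
    epochs=EPOCHS, validation_split=0.2,
    verbose=1,                         # 1 = progress bar, 2 = one line per epoch
    callbacks=[tensorboard_callback])  # still writes TensorBoard logs to 'logs'
Note that verbose only controls the console output during training; the TensorBoard dashboard itself is opened separately in the notebook, e.g. with %tensorboard --logdir logs after %load_ext tensorboard.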