Understanding Keras LSTM Tensorboard Graph
I am confused by the graph I get in TensorBoard for my Keras LSTM network. I defined the network like this:
from keras.models import Sequential
from keras.layers import LSTM, Dense, Bidirectional
from keras.regularizers import l2

# neurons, look_back, weight_decay, dropout and outputs are hyperparameters defined elsewhere.
model = Sequential()
model.add(LSTM(neurons, return_sequences=True, input_shape=(look_back, 2)))
# model.add(Bidirectional(LSTM(neurons, return_sequences=True), input_shape=(look_back, 2)))
model.add(LSTM(neurons, return_sequences=True, recurrent_regularizer=l2(weight_decay),
               kernel_regularizer=l2(weight_decay), bias_regularizer=l2(weight_decay),
               dropout=dropout, recurrent_dropout=dropout))
model.add(LSTM(neurons, return_sequences=True, recurrent_regularizer=l2(weight_decay),
               kernel_regularizer=l2(weight_decay), bias_regularizer=l2(weight_decay),
               dropout=dropout, recurrent_dropout=dropout))
model.add(LSTM(neurons, return_sequences=True, recurrent_regularizer=l2(weight_decay),
               kernel_regularizer=l2(weight_decay), bias_regularizer=l2(weight_decay),
               dropout=dropout, recurrent_dropout=dropout))
model.add(LSTM(neurons, return_sequences=True, recurrent_regularizer=l2(weight_decay),
               kernel_regularizer=l2(weight_decay), bias_regularizer=l2(weight_decay),
               dropout=dropout, recurrent_dropout=dropout))
model.add(LSTM(neurons, return_sequences=True, recurrent_regularizer=l2(weight_decay),
               kernel_regularizer=l2(weight_decay), bias_regularizer=l2(weight_decay),
               dropout=dropout, recurrent_dropout=dropout))
model.add(LSTM(neurons, return_sequences=True, recurrent_regularizer=l2(weight_decay),
               kernel_regularizer=l2(weight_decay), bias_regularizer=l2(weight_decay),
               dropout=dropout, recurrent_dropout=dropout))
model.add(LSTM(neurons, return_sequences=True, recurrent_regularizer=l2(weight_decay),
               kernel_regularizer=l2(weight_decay), bias_regularizer=l2(weight_decay),
               dropout=dropout, recurrent_dropout=dropout))
model.add(LSTM(neurons, return_sequences=True, recurrent_regularizer=l2(weight_decay),
               kernel_regularizer=l2(weight_decay), bias_regularizer=l2(weight_decay),
               dropout=dropout, recurrent_dropout=dropout))
model.add(LSTM(neurons, return_sequences=True, recurrent_regularizer=l2(weight_decay),
               kernel_regularizer=l2(weight_decay), bias_regularizer=l2(weight_decay),
               dropout=dropout, recurrent_dropout=dropout))
model.add(LSTM(neurons, return_sequences=True, recurrent_regularizer=l2(weight_decay),
               kernel_regularizer=l2(weight_decay), bias_regularizer=l2(weight_decay),
               dropout=dropout, recurrent_dropout=dropout))
model.add(LSTM(neurons, return_sequences=True, recurrent_regularizer=l2(weight_decay),
               kernel_regularizer=l2(weight_decay), bias_regularizer=l2(weight_decay),
               dropout=dropout, recurrent_dropout=dropout))
model.add(LSTM(20, return_sequences=False, recurrent_regularizer=l2(weight_decay),
               kernel_regularizer=l2(weight_decay), bias_regularizer=l2(weight_decay),
               dropout=dropout, recurrent_dropout=dropout))
model.add(Dense(outputs, kernel_regularizer=l2(weight_decay), bias_regularizer=l2(weight_decay),
                activation='linear'))
model.compile(loss='mean_squared_error', optimizer='adam', metrics=['accuracy'])
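For context, the graph comes from Keras's TensorBoard callback attached during training. A minimal sketch of how that might be wired up (the log directory and the training arrays X_train and y_train are placeholders, not part of the original code):

from keras.callbacks import TensorBoard

# Hypothetical training call: './logs', X_train and y_train are placeholders.
tensorboard = TensorBoard(log_dir='./logs', write_graph=True)
model.fit(X_train, y_train, epochs=50, batch_size=32, callbacks=[tensorboard])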
I thought this would give me a sequential model in which each LSTM takes the output of the previous LSTM, and for the most part that is what I see. But I also get one LSTM layer feeding into every subsequent layer:
In the graph it looks like lstm_2 feeds into every layer, which I did not expect. So my question is: is this expected, and if so, why?
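One way to check the actual wiring, independently of how TensorBoard draws it, is to inspect the model directly; a minimal sketch (in a Sequential model, each layer's only data input should be the previous layer's output):

# Lists the layers in order with their output shapes.
model.summary()

# In graph mode, the tensor names show each layer consuming only its predecessor's output.
for layer in model.layers:
    print(layer.name, layer.input.name, '->', layer.output.name)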
Thanks.
I figured out why it is displayed this way. It turns out that Keras creates a learning_phase placeholder and places it under the second hidden layer (its name scope in the graph). The learning_phase object branches out to every layer, but the LSTM itself does not. I referred to this for more details.
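A quick way to see the placeholder in question; a minimal sketch, assuming the TF-1.x-era Keras backend API:

from keras import backend as K

# learning_phase() returns the boolean tensor that dropout and recurrent_dropout
# switch on (1 during training, 0 at inference). Every LSTM layer above uses
# dropout, so each one takes this tensor as an extra input, which is why it
# appears to fan out to every layer in the TensorBoard graph.
print(K.learning_phase())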
Here is what the inside of my LSTM_1 layer looks like in my TensorBoard graph: