Example of using a KerasRegressor in scikit-optimize
I am using the excellent scikit-optimize toolbox for hyperparameter optimization. My goal is to compare Keras and scikit-learn models.
According to the example at https://scikit-optimize.github.io/stable/auto_examples/sklearn-gridsearchcv-replacement.html#sphx-glr-auto-examples-sklearn-gridsearchcv-replacement-py, only scikit-learn models are used. Trying something like the code below does not let me integrate a Keras model into BayesSearchCV.
from keras.models import Sequential
from keras.layers import Dense
from keras.wrappers.scikit_learn import KerasRegressor

# Function to create the model, required for KerasRegressor
def create_model(optimizer='rmsprop', init='glorot_uniform'):
    # create model
    model = Sequential()
    model.add(Dense(12, input_dim=8, kernel_initializer=init, activation='relu'))
    model.add(Dense(8, kernel_initializer=init, activation='relu'))
    model.add(Dense(1, kernel_initializer=init, activation='linear'))
    # compile model ('r2' is not a built-in Keras metric, so track 'mse' instead)
    model.compile(loss='mse', optimizer=optimizer, metrics=['mse'])
    return model

model = KerasRegressor(build_fn=create_model, verbose=0)

NN_search = {
    'model': [model],                  # the wrapper instance itself, not model()
    'model__optimizer': optimizers,    # key must match the create_model argument name
    'model__epochs': epochs,
    'model__batch_size': batches,
    'model__init': init
}
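For context: the candidate lists referenced in NN_search are never defined in the snippet, and the BayesSearchCV call itself is not shown. A minimal sketch of both, with placeholder values of my own choosing and the Pipeline pattern used in the linked scikit-optimize example:

from sklearn.pipeline import Pipeline
from skopt import BayesSearchCV

# illustrative candidate lists (assumptions, not the asker's actual values);
# these would have to be defined before the NN_search dict above
optimizers = ['rmsprop', 'adam']
init = ['glorot_uniform', 'normal', 'uniform']
epochs = [50, 100]
batches = [16, 32]

pipe = Pipeline([('model', model)])
opt = BayesSearchCV(pipe, [(NN_search, 10)], cv=3)
# opt.fit(X, y)  # this fit is the step that fails with the Keras wrapper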
Has anyone managed to incorporate a KerasClassifier/Regressor into BayesSearchCV?
Well, I found a way: define the model so that it is built from global parameters. The objective function called inside the scikit-optimize minimizer sets those globals, and they are then picked up by the create_model_NN function, which is wrapped with the Keras scikit-learn KerasRegressor wrapper.
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import Adam
from keras.wrappers.scikit_learn import KerasRegressor

def create_model_NN():
    # start the model-building process and create the first layer
    model = Sequential()
    model.add(Dense(num_input_nodes, input_shape=(40,), activation=activation))
    # add one new dense layer per requested hidden layer;
    # naming the layers helps avoid a TensorFlow error deep in the stack trace
    for i in range(num_dense_layers):
        name = 'layer_dense_{0}'.format(i + 1)
        model.add(Dense(num_dense_nodes, activation=activation, name=name))
    # add the linear output layer (regression, not classification)
    model.add(Dense(1, activation='linear'))
    # set up the optimizer and compile
    adam = Adam(lr=learning_rate)
    model.compile(optimizer=adam, loss='mean_squared_error', metrics=['mse'])
    return model
from sklearn import metrics
from sklearn.model_selection import train_test_split
from skopt.utils import use_named_args

# use_named_args unpacks the point proposed by gp_minimize into keyword
# arguments; space_NN is the skopt search space (a sketch is given below)
@use_named_args(space_NN)
def objective_NN(**params):
    print(params)
    global learning_rate
    learning_rate = params["learning_rate"]
    global num_dense_layers
    num_dense_layers = params["num_dense_layers"]
    global num_input_nodes
    num_input_nodes = params["num_input_nodes"]
    global num_dense_nodes
    num_dense_nodes = params["num_dense_nodes"]
    global activation
    activation = params["activation"]
    model = KerasRegressor(build_fn=create_model_NN, epochs=100, batch_size=1000, verbose=0)
    X_train, X_test, y_train, y_test = train_test_split(X_time, y_time, test_size=0.33, random_state=42)
    model.fit(X_train, y_train)
    y_pr = model.predict(X_test)
    res = metrics.r2_score(y_test, y_pr)
    # gp_minimize minimizes, so return the negated R^2 score
    return -res
And call it:
res_gp = gp_minimize(objective_NN, space_NN, n_calls=10, random_state=0)
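Note that space_NN is never shown in the answer. A minimal sketch of what it might look like, assuming skopt dimension objects whose names match the keys read inside objective_NN (the bounds are illustrative, not the author's; the list must be defined before objective_NN so the use_named_args decorator can see it):

from skopt import gp_minimize
from skopt.space import Real, Integer, Categorical

# hypothetical search space; names must match the params in objective_NN
space_NN = [
    Real(1e-4, 1e-1, prior='log-uniform', name='learning_rate'),
    Integer(1, 5, name='num_dense_layers'),
    Integer(1, 128, name='num_input_nodes'),
    Integer(1, 128, name='num_dense_nodes'),
    Categorical(['relu', 'tanh'], name='activation'),
]

After the run, res_gp.x holds the best parameter values found and res_gp.fun the corresponding objective value (the negated R^2).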