
I am using the scikit-optimize package to tune the hyperparameters of my models. For performance and readability reasons (I am training several models with the same procedure), I want to structure the whole hyperparameter tuning inside a class:

...
import tensorflow as tf
from tensorflow.keras import backend as K
from tensorflow.keras.preprocessing import sequence
from tensorflow.keras.models import Sequential, load_model
from tensorflow.keras.layers import InputLayer, Input, Dense, Embedding, BatchNormalization, Dropout
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import TensorBoard, EarlyStopping
from sklearn.preprocessing import MinMaxScaler, OneHotEncoder
from sklearn.model_selection import train_test_split

import skopt
from skopt import gp_minimize
from skopt.space import Real, Categorical, Integer
from skopt.plots import plot_convergence
from skopt.plots import plot_objective, plot_evaluations
from skopt.utils import use_named_args

class hptuning:
    def __init__(self, input_df):
        self.inp_df = input_df
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(...)
        self.param_space = self.dim_hptuning()
        self.best_loss = 10000

    def dim_hptuning(self):
        dim_layers = Integer(low=0, high=7, name='layers')
        dim_nodes = Integer(low=2, high=90, name='num_nodes')
        dimensions = [dim_layers, dim_nodes]
        return dimensions

    def create_model(self, layers, nodes):
        model = Sequential()
        for layer in range(layers):
            model.add(Dense(nodes))
        model.add(Dense(1, activation='sigmoid'))
        optimizer = Adam
        model.compile(loss='mean_absolute_error',
                      optimizer=optimizer,
                      metrics=['mae', 'mse'])
        return model

    @use_named_args(dimensions=self.param_space)
    def fitness(self, nodes, layers):
        model = self.create_model(layers=layers, nodes=nodes)
        history = model.fit(x=self.X_train.values, y=self.y_train.values, epochs=200, batch_size=200, verbose=0)
        loss = history.history['val_loss'][-1]
        if loss < self.best_loss:
            model.save('model.h5')
            self.best_loss = loss
        del model
        K.clear_session()
        return loss

    def find_best_model(self):
        search_result = gp.minimize(func=self.fitness, dimensions=self.param_space, acq_func='EI', n_calls=10)
        return search_result

hptun = hptuning(input_df=df)
search_result = hptun.find_best_model()
print(search_result.fun)

Now I am running into the problem that the decorator @use_named_args (taken from the scikit-optimize example code) does not work properly inside a class. I get the error message

Traceback (most recent call last):
  File "main.py", line 138, in <module>
    class hptuning:
  File "main.py", line 220, in hptuning
    @use_named_args(dimensions=self.param_space)
NameError: name 'self' is not defined

which is obviously about me misusing the decorator in this situation.

Probably due to my lack of understanding of how this kind of decorator works, I cannot get it to run. Can someone help me solve this problem?

Thanks in advance for your support!


1 Answer


The problem of self not being defined has nothing to do with scikit-optimize. You cannot use self in the decorator definition, because self only exists inside the method you are decorating; the decorator arguments are evaluated while the class body is being executed, before any instance exists. But even if you work around that (for example by providing param_space as a global variable), I expect the next problem would be that self is passed on to the function decorated with use_named_args, which expects only the optimization parameters.
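
To illustrate the timing, here is a minimal sketch (with made-up names, independent of skopt) that reproduces the same NameError:

    def takes_an_argument(arg):
        # A hypothetical decorator factory; its argument is evaluated at decoration time.
        def decorator(func):
            return func
        return decorator

    class Example:
        # The class body runs when the class is defined, before __init__ has ever
        # been called, so no instance and no `self` exists at this point.
        @takes_an_argument(arg=self.some_attribute)  # NameError: name 'self' is not defined
        def method(self):
            pass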

The most obvious solution is not to put the decorator on the fitness method itself, but to define a decorated wrapper function inside find_best_model that calls fitness, like this:

    def find_best_model(self):
        @use_named_args(dimensions=self.param_space)
        def fitness_wrapper(*args, **kwargs):
            return self.fitness(*args, **kwargs)

        search_result = gp_minimize(func=fitness_wrapper, dimensions=self.param_space, acq_func='EI', n_calls=10)
        return search_result
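
With this approach, fitness itself should no longer carry its own @use_named_args decorator, and its keyword arguments have to match the dimension names ('layers' and 'num_nodes') that the wrapper passes through. A sketch of how fitness could then look (assuming the held-out split from __init__ is used as validation data, since val_loss is read from the training history):

    def fitness(self, layers, num_nodes):
        # Keyword names match the skopt dimensions defined in dim_hptuning.
        model = self.create_model(layers=layers, nodes=num_nodes)
        # 'val_loss' only appears in the history if validation data is passed to fit().
        history = model.fit(x=self.X_train.values, y=self.y_train.values,
                            validation_data=(self.X_test.values, self.y_test.values),
                            epochs=200, batch_size=200, verbose=0)
        loss = history.history['val_loss'][-1]
        if loss < self.best_loss:
            model.save('model.h5')
            self.best_loss = loss
        del model
        K.clear_session()
        return loss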

answered 2020-11-05T13:45:00.903