
How do I save and load a model from BayesSearchCV? I want to save it to my local machine and load it into Kaggle, but I have tried both the Keras model.save() command and skopt's dump() (the correct way), and neither saved my model. Any help is much appreciated!
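For context, the model.save() attempt was roughly along these lines (sketch only; it uses the model1_bayesCV object defined further down and assumes the refit KerasClassifier exposes the trained Keras model as .model, as the old keras.wrappers.scikit_learn wrapper does):

# sketch of the model.save() attempt (assumption: the refit best estimator
# stores the fitted Keras model at .model)
best_keras_model = model1_bayesCV.best_estimator_.model
best_keras_model.save('my_best_model1.hdf5')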

Functions:

import skopt
from skopt import BayesSearchCV
from skopt.space import Integer
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras.wrappers.scikit_learn import KerasClassifier

# search space for the fit hyperparameters
search_space = dict()
search_space["epochs"] = Integer(125, 155, 'uniform')
search_space["batch_size"] = Integer(165, 220, 'uniform')

# define model
def buildmodel1(optimizer='adam'):
    model = Sequential()
    model.add(Dense(784, activation='selu', input_dim=784, kernel_initializer="he_normal", use_bias=True))
    model.add(Dropout(rate=0.4))
    model.add(Dense(150, activation='selu', kernel_initializer="he_normal", use_bias=True))
    model.add(Dropout(rate=0.25))
    model.add(Dense(150, activation='selu', kernel_initializer="he_normal"))
    model.add(Dropout(rate=0.25))
    model.add(Dense(150, activation='selu', kernel_initializer="he_normal"))
    model.add(Dropout(rate=0.25))
    model.add(Dense(150, activation='selu', kernel_initializer="he_normal"))
    model.add(Dropout(rate=0.25))
    model.add(Dense(150, activation='selu', kernel_initializer="he_normal"))
    model.add(Dropout(rate=0.25))
    model.add(Dense(10, activation='softmax', kernel_initializer="he_normal"))
    model.compile(loss='sparse_categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])
    return model

def bayes_search(filepath, model):
    # save intermediate search results after each iteration
    checkpoint_callback = skopt.callbacks.CheckpointSaver(filepath)
    # create the classifier and run the Bayesian search
    estimator = KerasClassifier(build_fn=model, verbose=0)
    bayes = BayesSearchCV(estimator, search_spaces=search_space, cv=3)
    fit_CV = bayes.fit(X_train, y_train, callback=[checkpoint_callback])

    print(fit_CV.best_params_)
    return fit_CV, bayes

Calling the functions:

from skopt import dump, load

filepath = 'my_best_model1.hdf5'
# run the search, then try to persist the fitted search object
model1_bayesCV, cv_instance = bayes_search(filepath, buildmodel1)
dump(model1_bayesCV, 'result.pkl')

I get TypeError: can't pickle _thread.RLock objects

Happy to post the incredibly long full stack trace if it helps, but the top and bottom three frames are:

Top:

---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-187-74d6f027bfc0> in <module>
      1 from skopt import dump, load
      2 
----> 3 dump(model1_bayesCV, 'result.pkl')

~\anaconda3\envs\tflow2\lib\site-packages\skopt\utils.py in dump(res, filename, store_objective, **kwargs)
    134     """
    135     if store_objective:
--> 136         dump_(res, filename, **kwargs)
    137 
    138     elif 'func' in res.specs['args']:

~\anaconda3\envs\tflow2\lib\site-packages\joblib\numpy_pickle.py in dump(value, filename, compress, protocol, cache_size)
    478     elif is_filename:
    479         with open(filename, 'wb') as f:
--> 480             NumpyPickler(f, protocol=protocol).dump(value)
    481     else:
    482         NumpyPickler(filename, protocol=protocol).dump(value)

Bottom:

~\anaconda3\envs\tflow2\lib\pickle.py in _batch_setitems(self, items)
    883                 for k, v in tmp:
    884                     save(k)
--> 885                     save(v)
    886                 write(SETITEMS)
    887             elif n:

~\anaconda3\envs\tflow2\lib\site-packages\joblib\numpy_pickle.py in save(self, obj)
    280             return
    281 
--> 282         return Pickler.save(self, obj)
    283 
    284 

~\anaconda3\envs\tflow2\lib\pickle.py in save(self, obj, save_persistent_id)
    522             reduce = getattr(obj, "__reduce_ex__", None)
    523             if reduce is not None:
--> 524                 rv = reduce(self.proto)
    525             else:
    526                 reduce = getattr(obj, "__reduce__", None)

TypeError: can't pickle _thread.RLock objects
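
If I read the bottom frames correctly, the failure happens while pickling the Keras model held inside the fitted KerasClassifier, not in skopt itself. A minimal sketch of what I believe is the same issue (assuming an older tf.keras that does not support pickling models directly):

# minimal sketch of the suspected underlying issue: older tf.keras models
# are not directly picklable and raise the same _thread.RLock TypeError
import pickle
from tensorflow import keras

m = keras.Sequential([keras.layers.Dense(10, activation='softmax', input_dim=784)])
m.compile(loss='sparse_categorical_crossentropy', optimizer='adam')
pickle.dumps(m)  # expected to fail here on older TF/Keras versions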