
Does anyone know whether it is possible to calculate metrics other than accuracy in HyperOpt? I would also like it to report F1, precision, and recall. Is there an option to do this? If so, could someone please explain it to me?

def objective(space):
    pipe_params = {}
    
    for s in space:
        pipe_params[f"classifier__{s}"] = space[s]
        
    pipe.set_params(**pipe_params)
    score = cross_val_score(pipe, X_train, y_train, cv=10, scoring="accuracy", n_jobs=-1).mean()
    # Is there an option to add other metrics to the returned dict?
    return {'loss': 1 - score, 'status': STATUS_OK, 'accuracy': score}

trials_df = []
 
for cl in classifiers:
    cl_name = cl['class'].__class__.__name__
    print(f"\n\n{cl_name}")
    
    pipe = Pipeline(steps=[
        ('data_processing_pipeline', data_processing_pipeline),
        ('classifier', cl['class'])
    ])
    
    space = {}
    for k in cl['params']:
        space[k] = cl['params'][k]
    
    max_evals = cl['max_evals']
    
    trials = Trials()
    best = fmin(fn=objective,
                    space=space,
                    algo=tpe.suggest,
                    max_evals=max_evals,
                    trials=trials)
     
    best_params = space_eval(space, best)
    print('\nThe best params:')
    print ("{:<30} {}".format('Parameter','Selected'))
    for k, v in best_params.items():
        print ("{:<30} {}".format(k, v))
    
    for trial in trials.trials:
        trials_df.append({
            'classifier': cl_name,
            'loss': trial['result']['loss'],
            'accuracy': trial['result']['accuracy'],
            'params': trial['misc']['vals']
            })

Here is the link to my GitHub if anyone wants to see the whole code: https://github.com/mikolaj-halemba/Water-Quality-/blob/main/water_quality.ipynb


1 Answer


Try these built-in scikit-learn functions.

from sklearn.metrics import precision_score, recall_score, f1_score

print(precision_score(y_test, y_pred))
print(recall_score(y_test, y_pred))
print(f1_score(y_test, y_pred))
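
Those functions only score a single set of predictions, though. If you want F1, precision, and recall alongside accuracy for every HyperOpt trial, one option is to replace cross_val_score with cross_validate, pass several scorers, and put the extra metrics into the dictionary the objective returns; Trials keeps whatever keys you add there. A minimal sketch, reusing pipe, X_train, and y_train from your code (the choice of weighted averaging is an assumption you may want to change):

from hyperopt import STATUS_OK
from sklearn.model_selection import cross_validate

def objective(space):
    pipe_params = {f"classifier__{s}": space[s] for s in space}
    pipe.set_params(**pipe_params)

    # cross_validate accepts a list of scorers and returns one array per metric,
    # keyed as 'test_<scorer name>'
    scores = cross_validate(
        pipe, X_train, y_train, cv=10, n_jobs=-1,
        scoring=["accuracy", "precision_weighted", "recall_weighted", "f1_weighted"],
    )

    # Everything returned here is stored in trials.trials[i]['result']
    return {
        'loss': 1 - scores['test_accuracy'].mean(),
        'status': STATUS_OK,
        'accuracy': scores['test_accuracy'].mean(),
        'precision': scores['test_precision_weighted'].mean(),
        'recall': scores['test_recall_weighted'].mean(),
        'f1': scores['test_f1_weighted'].mean(),
    }

After fmin finishes you can then read e.g. trial['result']['f1'] when building trials_df, exactly as you already do for trial['result']['accuracy'].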