我遇到了一个奇怪的问题:
我通过hyperopt
'max_depth'
hp.choice('max_depth',range(2,20))
但是我得到了'max_depth' = 0
或1
的结果,这不在[2,20)
的限制之内。为什么?
我错过了什么吗?
谢谢。
错误结果:
{'colsample_bytree': 0.18, 'learning_rate': 0.05, 'max_depth': 1, 'reg_alpha': 3.44, 'reg_lambda': 0.92}
{'colsample_bytree': 0.41, 'learning_rate': 0.09, 'max_depth': 0, 'reg_alpha': 0.14, 'reg_lambda': 3.53}
{'colsample_bytree': 0.71, 'learning_rate': 0.17, 'max_depth': 0, 'reg_alpha': 2.21, 'reg_lambda': 2.82}
def xgb_classifier_tune(params):
    """Hyperopt objective: train an XGBoost classifier with ``params`` and
    return the negative validation MCC as the loss (fmin minimizes, so
    minimizing -mcc maximizes MCC).

    params: dict sampled by hyperopt from the search space. NOTE: inside the
    objective, hp.choice entries arrive as the *actual values* from the
    choice list (2..19 here) — the index-vs-value confusion only affects
    what fmin()/Trials report afterwards.

    Returns a hyperopt result dict with at least 'loss' and 'status'.
    """
    # Guard first: reject invalid depths before paying for model construction.
    if params['max_depth'] < 2:
        return {'loss': 999.999, 'status': STATUS_FAIL, 'info': [0, 0, 0, {}]}
    obj = 'binary:logistic' if class_nums == 2 else 'multi:softmax'
    # Re-seed per trial so random_state differs across evaluations.
    random.seed(time.time())
    xgb_model = xgb.XGBClassifier(
        max_depth=params['max_depth'],
        colsample_bytree=params['colsample_bytree'],
        learning_rate=params['learning_rate'],
        reg_alpha=params['reg_alpha'],
        reg_lambda=params['reg_lambda'],
        objective=obj,
        n_estimators=100000,  # effectively unbounded; early stopping ends training
        random_state=random.randint(0, 99999),
        n_jobs=-1)
    # verbose: 0 (silent), 1 (warning), 2 (info), 3 (debug)
    xgb_model.fit(tune_train_x, tune_train_y,
                  eval_set=[(tune_valid_x, tune_valid_y)],
                  verbose=1, early_stopping_rounds=100)
    predict_y = xgb_model.predict(tune_valid_x)
    f1, mcc, roc_auc, table = get_score(tune_valid_y[y_feature].values, predict_y)
    # BUG FIX: original line was `return 'loss':-mcc,'status': STATUS_OK`,
    # a syntax error — hyperopt requires a dict here.
    return {'loss': -mcc, 'status': STATUS_OK}
def xgb_hyper_tune():
    """Run a hyperopt TPE search over XGBoost hyper-parameters.

    Returns the best parameter set as *actual values*. This is the fix for
    the reported problem: fmin() returns hp.choice parameters as indices
    into the choice list (so 'max_depth' came back as 0 or 1, meaning the
    1st/2nd element of range(2, 20)). space_eval() maps those indices back
    to the real values.
    """
    from hyperopt import space_eval  # local import; hp/fmin/etc. already in scope

    mdep = list(range(2, 20))
    space = {'max_depth': hp.choice('max_depth', mdep),
             'colsample_bytree': hp.uniform('colsample_bytree', 0.1, 0.9),
             'learning_rate': hp.quniform('learning_rate', 0.01, 0.2, 0.01),
             'reg_alpha': hp.uniform('reg_alpha', 0.1, 6.0),
             'reg_lambda': hp.uniform('reg_lambda', 0.1, 6.0)}
    trials = Trials()
    best_param = fmin(xgb_classifier_tune, space, algo=tpe.suggest,
                      max_evals=100, trials=trials)
    # Convert index-valued hp.choice entries back to real parameter values.
    return space_eval(space, best_param)
答案 0(得分:1):
因为 hp.choice 返回的是所选项在列表中的**索引**,而不是列表中的值本身。例如,结果里的 0 表示 max_depth 实际取的是 range(2, 20) 的第一个元素,即 2;1 表示取的是 3。如果想把 fmin 返回的索引换算回真实的参数值,可以使用 hyperopt.space_eval(space, best_param)。