回归预测之XGBoost——运行+调优


import matplotlib.pyplot as plt
import numpy as np
import xgboost as xgb
from sklearn import metrics
from sklearn.model_selection import GridSearchCV
from xgboost import plot_importance
from xgboost.sklearn import XGBClassifier, XGBRegressor
# Train an XGBoost regressor with hand-picked hyperparameters, then
# evaluate it on the held-out test set and plot predictions vs. truth.
xgb_reg = xgb.XGBRegressor(
    n_estimators=750,       # number of boosting rounds
    max_depth=9,            # maximum depth of each tree
    colsample_bytree=0.5,   # fraction of features sampled per tree
    reg_alpha=0.05,         # L1 regularization strength
    subsample=0.6,          # fraction of rows sampled per tree
)
xgb_reg.fit(X_train, y_train)
y_pred = xgb_reg.predict(X_test)
print(y_pred)

# Goodness of fit: the coefficient of determination (R^2).
# NOTE: the original left these as bare expressions, whose values are
# discarded when run as a script (they only echo in a notebook) — and
# called set_params() with no arguments, a no-op; both fixed here.
print("r2:", xgb_reg.score(X_test, y_test))
# Mean squared error on the test set.
print("MSE:", metrics.mean_squared_error(y_test, y_pred))
# Per-feature importances learned by the boosted trees.
print(xgb_reg.feature_importances_)

plt.figure(figsize=(15, 10))
# One x-axis position per test sample.
t = np.arange(len(X_test))
# Ground-truth curve (red).
plt.plot(t, y_test, 'r', linewidth=2, label='真实值')
# Prediction curve (green).
plt.plot(t, y_pred, 'g', linewidth=2, label='预测值')
plt.legend()
plt.show()

![预测值与真实值对比图](image-20220219174015590.png)

粗调

# 参数设定
xgb_params = {'max_depth':[6,9],    
             'subsample':[0.6,0.9,1],
             'colsample_bytree':[0.5,0.6],
             'reg_alpha':[0,0.05,0.1],
             'n_estimators':[750,1000]}
# 参数搜索
xgb_gridsearch = GridSearchCV(xgb_reg,xgb_params,cv=3,n_jobs=-1,
                                verbose=10,refit=True)
xgb_gridsearch.fit(X_train, y_train)
y_pred=xgb_gridsearch.predict(X_test)
print(y_pred)
# 拟合优度R2
print("r2:", xgb_gridsearch.score(X_test, y_test))
# 用Scikit_learn计算MSE
print("MSE:", metrics.mean_squared_error(y_test, y_pred))
print(xgb_gridsearch.best_params_)

将网格搜索得到的最优参数(`best_params_`)代入模型,重新训练并评估。