#!/usr/bin/env python
# coding: utf-8
"""Train and evaluate an XGBoost regressor on the Boston housing dataset.

Exported from a Jupyter notebook. Stray cell-output lines that had been
pasted back into the code (a model ``repr`` and an ``MSE: 3.35`` result
line) have been removed, and a string literal that was broken across a
physical newline (a syntax error) has been repaired.

Pipeline: fit an ``XGBRegressor`` on an 85/15 train/test split, report the
training R^2 score, two 10-fold cross-validation estimates, the test-set
MSE/RMSE, and plot predictions against the true targets.
"""

import matplotlib.pyplot as plt
import xgboost as xgb
from sklearn.datasets import load_boston
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import KFold, cross_val_score, train_test_split


def main():
    """Run the full train / cross-validate / evaluate / plot pipeline."""
    # NOTE(review): load_boston was deprecated in scikit-learn 1.0 and
    # removed in 1.2 (ethical concerns about the dataset). On modern
    # scikit-learn this call raises ImportError at module import time —
    # consider migrating to fetch_california_housing.
    boston = load_boston()
    x, y = boston.data, boston.target
    # Hold out 15% of the samples for the final test-set evaluation.
    xtrain, xtest, ytrain, ytest = train_test_split(x, y, test_size=0.15)

    # verbosity=0 silences xgboost's per-iteration logging.
    xgbr = xgb.XGBRegressor(verbosity=0)
    print(xgbr)

    xgbr.fit(xtrain, ytrain)

    # R^2 on the training data — optimistic; the CV estimates below are fairer.
    score = xgbr.score(xtrain, ytrain)
    print("Training score: ", score)

    # 10-fold cross-validation on the training split (default, unshuffled folds).
    scores = cross_val_score(xgbr, xtrain, ytrain, cv=10)
    print("Mean cross-validation score: %.2f" % scores.mean())

    # Shuffled K-fold variant of the same estimate.
    kfold = KFold(n_splits=10, shuffle=True)
    kf_cv_scores = cross_val_score(xgbr, xtrain, ytrain, cv=kfold)
    print("K-fold CV average score: %.2f" % kf_cv_scores.mean())

    # Held-out test-set error.
    ypred = xgbr.predict(xtest)
    mse = mean_squared_error(ytest, ypred)
    print("MSE: %.2f" % mse)
    print("RMSE: %.2f" % (mse ** (1 / 2.0)))

    # Plot predicted vs. original targets over the test samples, by index.
    x_ax = range(len(ytest))
    plt.plot(x_ax, ytest, label="original")
    plt.plot(x_ax, ypred, label="predicted")
    plt.title("Boston test and predicted data")
    plt.legend()
    plt.show()


if __name__ == "__main__":
    main()