#!/usr/bin/env python
# coding: utf-8

# ## Regression with AdaBoost regressor

# In[1]:

get_ipython().run_line_magic('matplotlib', 'inline')

import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
import numpy as np

from sklearn import ensemble, datasets, metrics, model_selection


# In[2]:

# Load the Boston housing dataset and print its description.
# Note: load_boston was removed in scikit-learn 1.2, so this cell requires an
# older scikit-learn version.
boston = datasets.load_boston()
print(boston.DESCR)


# In[3]:

# Feature matrix as a DataFrame, target as a NumPy array.
X = pd.DataFrame(boston.data, columns=boston.feature_names)
y = boston.target


# In[4]:

# 70/30 train/test split.
X_train, X_test, y_train, y_test = model_selection.train_test_split(X, y, train_size=0.7)
print('train samples:', len(X_train))
print('test samples:', len(X_test))


# In[5]:

# Compare the target distribution in the train and test sets with a KDE plot.
df_train = pd.DataFrame(y_train, columns=['target'])
df_train['type'] = 'train'
df_test = pd.DataFrame(y_test, columns=['target'])
df_test['type'] = 'test'
# DataFrame.append was removed in pandas 2.0; use pd.concat instead.
df_set = pd.concat([df_train, df_test])

_ = sns.displot(df_set, x="target", hue="type", kind="kde", log_scale=False)


# In[6]:

# Fit an AdaBoost regressor with 50 boosting stages.
model = ensemble.AdaBoostRegressor(n_estimators=50)
model.fit(X_train, y_train)


# In[7]:

# Predicted vs. true values; the red diagonal marks perfect predictions.
predicted = model.predict(X_test)

fig, ax = plt.subplots()
ax.scatter(y_test, predicted)
ax.set_xlabel('True Values')
ax.set_ylabel('Predicted')
_ = ax.plot([0, y.max()], [0, y.max()], ls='-', color='red')


# In[8]:

# Residuals (true - predicted) plotted against the true values.
residual = y_test - predicted

fig, ax = plt.subplots()
ax.scatter(y_test, residual)
ax.set_xlabel('y')
ax.set_ylabel('residual')
_ = plt.axhline(0, color='red', ls='--')


# In[9]:

# Distribution of the residuals.
_ = sns.displot(residual, kind="kde")


# In[10]:

# Regression metrics on the test set.
print("r2 score: {}".format(metrics.r2_score(y_test, predicted)))
print("mse: {}".format(metrics.mean_squared_error(y_test, predicted)))
print("rmse: {}".format(np.sqrt(metrics.mean_squared_error(y_test, predicted))))
print("mae: {}".format(metrics.mean_absolute_error(y_test, predicted)))
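

# In[11]:

# A minimal sketch (not part of the original notebook) of how the AdaBoost
# hyperparameters could be tuned with GridSearchCV. It reuses X_train/y_train
# from the cells above; the parameter grid values are illustrative assumptions,
# not settings taken from the original notebook.
param_grid = {
    'n_estimators': [50, 100, 200],
    'learning_rate': [0.05, 0.1, 1.0],
    'loss': ['linear', 'square', 'exponential'],
}
search = model_selection.GridSearchCV(
    ensemble.AdaBoostRegressor(),
    param_grid,
    cv=5,
    scoring='neg_mean_squared_error',
)
search.fit(X_train, y_train)
print('best params:', search.best_params_)
# best_score_ is a negative MSE under this scoring, so negate it before the sqrt.
print('best cv rmse:', np.sqrt(-search.best_score_))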