#!/usr/bin/env python
# coding: utf-8
"""A from-scratch random forest classifier validated against scikit-learn.

Re-implements bootstrap sampling, out-of-bag (OOB) scoring, soft-voting
prediction and feature-importance averaging on top of sklearn's
``DecisionTreeClassifier``, then checks every public output for exact
agreement with sklearn's own ``RandomForestClassifier`` on the iris data.
"""

import numpy as np
from sklearn.datasets import load_iris
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier as skRandomForestClassifier


def accuracy_score(y_true, y_pred):
    """Return the fraction of predictions that match the true labels."""
    return np.mean(y_true == y_pred)


class RandomForestClassifier:
    """Bagging ensemble of decision trees with soft-voting prediction.

    Parameters
    ----------
    n_estimators : int, number of trees in the forest.
    max_depth : int or None, maximum depth of each tree.
    max_features : str or int, features considered per split.
        Default is "sqrt" (sqrt(n_features) per split). NOTE(review): the
        historical "auto" alias meant the same thing for classifiers but was
        removed in scikit-learn 1.3; "sqrt" is accepted by all versions.
    oob_score : bool, whether to compute the out-of-bag estimate after fit.
    random_state : int, seed controlling both tree seeds and bootstraps.
    """

    def __init__(self, n_estimators=100, max_depth=None, max_features="sqrt",
                 oob_score=False, random_state=0):
        self.n_estimators = n_estimators
        self.max_depth = max_depth
        self.max_features = max_features
        self.oob_score = oob_score
        self.random_state = random_state

    def fit(self, X, y):
        """Fit ``n_estimators`` trees on bootstrap resamples of ``(X, y)``."""
        self.n_features_ = X.shape[1]
        # Encode labels as 0..n_classes-1; classes_ maps codes back to originals.
        self.classes_, y_train = np.unique(y, return_inverse=True)
        self.n_classes_ = len(self.classes_)
        MAX_INT = np.iinfo(np.int32).max
        # Master RNG: each tree draws its own seed so that the per-tree
        # bootstrap can later be reproduced from est.random_state alone.
        rng = np.random.RandomState(self.random_state)
        self.estimators_ = []
        for _ in range(self.n_estimators):
            est = DecisionTreeClassifier(max_depth=self.max_depth,
                                         max_features=self.max_features,
                                         random_state=rng.randint(MAX_INT))
            # Bootstrap via sample weights: draw n row indices with
            # replacement and weight each row by its draw count. This is
            # equivalent to resampling rows and matches sklearn's internal
            # implementation, which is what makes the outputs bit-identical.
            sample_rng = np.random.RandomState(est.random_state)
            sample_indices = sample_rng.randint(0, X.shape[0], X.shape[0])
            sample_counts = np.bincount(sample_indices, minlength=X.shape[0])
            est.fit(X, y_train, sample_weight=sample_counts)
            self.estimators_.append(est)
        if self.oob_score:
            self._set_oob_score(X, y_train)
        return self

    def _set_oob_score(self, X, y):
        """Accumulate each tree's class probabilities over its OOB samples.

        Sets ``oob_decision_function_`` (row-normalized probabilities) and
        ``oob_score_`` (accuracy of the OOB majority vote against ``y``).
        """
        predictions = np.zeros((X.shape[0], self.n_classes_))
        for i in range(self.n_estimators):
            # Re-derive the exact bootstrap of tree i by reseeding with its
            # stored random_state; rows never drawn are out-of-bag.
            sample_rng = np.random.RandomState(self.estimators_[i].random_state)
            sample_indices = sample_rng.randint(0, X.shape[0], X.shape[0])
            mask = np.ones(X.shape[0], dtype=bool)
            mask[sample_indices] = False
            predictions[mask] += self.estimators_[i].predict_proba(X[mask])
        # NOTE(review): a row that was in-bag for every tree has an all-zero
        # prediction and divides 0/0 to NaN here — sklearn has the same
        # caveat; with 100 trees the probability is negligible.
        self.oob_decision_function_ = (predictions /
                                       np.sum(predictions, axis=1)[:, np.newaxis])
        self.oob_score_ = accuracy_score(y, np.argmax(predictions, axis=1))

    def predict_proba(self, X):
        """Return class probabilities averaged over all trees (soft voting)."""
        proba = np.zeros((X.shape[0], self.n_classes_))
        for i in range(self.n_estimators):
            proba += self.estimators_[i].predict_proba(X)
        proba /= self.n_estimators
        return proba

    def predict(self, X):
        """Return the class label with the highest averaged probability."""
        proba = self.predict_proba(X)
        return self.classes_[np.argmax(proba, axis=1)]

    @property
    def feature_importances_(self):
        """Per-feature importances averaged over trees, renormalized to sum 1."""
        all_importances = np.zeros(self.n_features_)
        for i in range(self.n_estimators):
            all_importances += self.estimators_[i].feature_importances_
        all_importances /= self.n_estimators
        return all_importances / np.sum(all_importances)


if __name__ == "__main__":
    # Self-test: every public output must agree with scikit-learn's forest.
    X, y = load_iris(return_X_y=True)
    clf1 = RandomForestClassifier(random_state=0, oob_score=True).fit(X, y)
    clf2 = skRandomForestClassifier(random_state=0, oob_score=True).fit(X, y)
    pred1 = clf1.predict(X)
    pred2 = clf2.predict(X)
    assert np.array_equal(pred1, pred2)
    prob1 = clf1.predict_proba(X)
    prob2 = clf2.predict_proba(X)
    assert np.allclose(prob1, prob2)
    assert np.allclose(clf1.oob_decision_function_, clf2.oob_decision_function_)
    assert np.allclose(clf1.oob_score_, clf2.oob_score_)
    assert np.allclose(clf1.feature_importances_, clf2.feature_importances_)