random_forest.py
import multiprocessing as mp

import numpy as np

from ..base import BaseClassifier, BaseRegressor
from ..ml.decison_tree import DecisionTreeClassifier, DecisionTreeRegressor


def _fit_tree(tree, X, y):
    """Fit a single tree and return it (module-level so worker processes can pickle it)."""
    tree.fit(X, y)
    return tree


class Base_Randomforest:
    def __init__(self, basetree, basetree_params=None, n_tree=20, samples_ratio=0.6, parallelize=False):
        self.basetree = basetree
        self.basetree_params = basetree_params if basetree_params is not None else {}
        self.parallelize = parallelize
        self.n_tree = n_tree
        self.samples_ratio = samples_ratio
        self.estimators = []

    def fit(self, X, y):
        # Draw bootstrap samples: one row of sample indices per tree
        n_sample = int(X.shape[0] * self.samples_ratio)
        samples_idx = np.random.choice(
            X.shape[0], size=self.n_tree * n_sample, replace=True
        ).reshape(self.n_tree, n_sample)
        # Create the base trees
        self.estimators = [self.basetree(**self.basetree_params) for _ in range(self.n_tree)]
        # Fit each tree on its bootstrap sample
        if self.parallelize:
            # apply_async runs the fits concurrently; the fitted trees returned by the
            # workers replace the unfitted estimators in the parent process.
            with mp.Pool(mp.cpu_count()) as pool:
                jobs = [
                    pool.apply_async(_fit_tree, (self.estimators[i], X[samples_idx[i]], y[samples_idx[i]]))
                    for i in range(self.n_tree)
                ]
                self.estimators = [job.get() for job in jobs]
        else:
            for i in range(self.n_tree):
                self.estimators[i].fit(X[samples_idx[i]], y[samples_idx[i]])

    def predict_all_trees(self, X):
        # Collect the predictions of every tree (one prediction array per tree)
        if self.parallelize:
            with mp.Pool(mp.cpu_count()) as pool:
                jobs = [pool.apply_async(est.predict, (X,)) for est in self.estimators]
                res = [job.get() for job in jobs]
        else:
            res = [est.predict(X) for est in self.estimators]
        return res
class RandomForestClassifier(Base_Randomforest, BaseClassifier):
    ''' Random Forest Classifier
    Ref : Ho, T.K. (1995) Random Decision Forests. Proceedings of the 3rd International Conference on Document Analysis and Recognition, Montreal, 14-16 August 1995, 278-282.
    Parameters
    ----------
    basetree : object,
        decision tree classifier class with fit and predict methods
        (may also be another base estimator)
    basetree_params : dict,
        parameters of the base tree
    parallelize : bool,
        whether to fit and predict the trees in parallel worker processes
    n_tree : int,
        number of trees in the forest
    '''
    def __init__(self, basetree=DecisionTreeClassifier, basetree_params=None, n_tree=20, parallelize=True):
        super().__init__(basetree=basetree, basetree_params=basetree_params, n_tree=n_tree, parallelize=parallelize)

    def fit(self, X, y):
        self.labels = np.unique(y)
        return super().fit(X, y)

    def predict(self, X):
        # Majority vote: for each sample, take the most common prediction across trees
        res = np.array(super().predict_all_trees(X))
        decision = []
        for col in range(res.shape[1]):
            values, counts = np.unique(res[:, col], return_counts=True)
            decision.append(values[counts.argmax()])
        return decision
class RandomForestRegressor(Base_Randomforest, BaseRegressor):
    ''' Random Forest Regressor
    Ref : Ho, T.K. (1995) Random Decision Forests. Proceedings of the 3rd International Conference on Document Analysis and Recognition, Montreal, 14-16 August 1995, 278-282.
    Parameters
    ----------
    basetree : object,
        decision tree regressor class with fit and predict methods
        (may also be another base estimator)
    basetree_params : dict,
        parameters of the base tree
    parallelize : bool,
        whether to fit and predict the trees in parallel worker processes
    n_tree : int,
        number of trees in the forest
    '''
    def __init__(self, basetree=DecisionTreeRegressor, basetree_params=None, n_tree=20, parallelize=True):
        super().__init__(basetree=basetree, basetree_params=basetree_params, n_tree=n_tree, parallelize=parallelize)

    def predict(self, X):
        # Average the predictions of all trees
        res = self.predict_all_trees(X)
        return np.mean(res, axis=0)
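

# Usage sketch (not part of the original module): a minimal example of fitting both
# forests on synthetic data, assuming this file sits inside its package so the relative
# imports above resolve and DecisionTreeClassifier / DecisionTreeRegressor expose
# fit/predict as documented. It only runs when the module is executed directly.
if __name__ == "__main__":
    rng = np.random.default_rng(0)

    # Toy classification data: two Gaussian blobs
    X_cls = np.vstack([rng.normal(0, 1, (50, 2)), rng.normal(3, 1, (50, 2))])
    y_cls = np.array([0] * 50 + [1] * 50)
    clf = RandomForestClassifier(n_tree=10, parallelize=False)
    clf.fit(X_cls, y_cls)
    print("classifier predictions:", clf.predict(X_cls[:5]))

    # Toy regression data: noisy linear target
    X_reg = rng.uniform(-1, 1, (100, 1))
    y_reg = 2.0 * X_reg[:, 0] + rng.normal(0, 0.1, 100)
    reg = RandomForestRegressor(n_tree=10, parallelize=False)
    reg.fit(X_reg, y_reg)
    print("regressor predictions:", reg.predict(X_reg[:5]))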