forked from kevin-keraudren/randomferns-python

weakLearner.py
import numpy as np

__all__ = ["AxisAligned",
           "Linear",
           "Conic",
           "Parabola"]


class WeakLearner:
    """Interface for the weak learners used as node tests."""
    def generate_all(self, points, count):
        raise NotImplementedError

    def __str__(self):
        raise NotImplementedError

    def run(self, points, tests):
        raise NotImplementedError
class AxisAligned(WeakLearner):
    """Axis-aligned test: compare a single coordinate to a random threshold."""
    def __str__(self):
        return "AxisAligned"

    def generate_all(self, points, count):
        x_min = points.min(0)[0]
        y_min = points.min(0)[1]
        x_max = points.max(0)[0]
        y_max = points.max(0)[1]
        tests = []
        # half of the tests threshold the x coordinate, the other half the y coordinate
        tests.extend(zip(np.zeros(count // 2, dtype=int),
                         np.random.uniform(x_min, x_max, count // 2)))
        tests.extend(zip(np.ones(count // 2, dtype=int),
                         np.random.uniform(y_min, y_max, count // 2)))
        return np.array(tests)

    def run(self, points, tests):
        # each test is (axis, threshold); the axis index must be cast back to int
        return [points[:, int(test[0])] > test[1] for test in tests]
class Linear(WeakLearner):
    """Linear test: which side of a randomly oriented line through a random point."""
    def __str__(self):
        return "Linear"

    def generate_all(self, points, count):
        x_min = points.min(0)[0]
        y_min = points.min(0)[1]
        x_max = points.max(0)[0]
        y_max = points.max(0)[1]
        tests = []
        # each test is (x0, y0, angle in degrees)
        tests.extend(zip(np.random.uniform(x_min, x_max, count),
                         np.random.uniform(y_min, y_max, count),
                         np.random.uniform(0, 360, count)))
        return tests

    def run(self, points, tests):
        def _run(test):
            theta = test[2] * np.pi / 180
            return (np.cos(theta) * (points[:, 0] - test[0]) +
                    np.sin(theta) * (points[:, 1] - test[1])) > 0
        return [_run(test) for test in tests]
class Conic(WeakLearner):
    """Non-linear test: sign of a random conic A*x^2 + B*y^2 + C*x*y + D*x + E*y + F."""
    def __str__(self):
        return "Conic"

    def generate_all(self, points, count):
        x_min = points.min(0)[0]
        y_min = points.min(0)[1]
        x_max = points.max(0)[0]
        y_max = points.max(0)[1]
        scale = max(points.max(), abs(points.min()))
        tests = []
        # each coefficient is randomly zeroed out so that degenerate conics
        # (lines, parabolas, ...) are generated as well
        tests.extend(zip(np.random.uniform(x_min, x_max, count),
                         np.random.uniform(y_min, y_max, count),
                         np.random.uniform(-scale, scale, count) * np.random.randint(0, 2, count),
                         np.random.uniform(-scale, scale, count) * np.random.randint(0, 2, count),
                         np.random.uniform(-scale, scale, count) * np.random.randint(0, 2, count),
                         np.random.uniform(-scale, scale, count) * np.random.randint(0, 2, count),
                         np.random.uniform(-scale, scale, count) * np.random.randint(0, 2, count),
                         np.random.uniform(-scale, scale, count) * np.random.randint(0, 2, count)))
        return tests

    def run(self, points, tests):
        def _run(test):
            x = points[:, 0] - test[0]
            y = points[:, 1] - test[1]
            A, B, C, D, E, F = test[2:]
            # general conic form with a cross term; the original C*x*x duplicated
            # the A term, which the six coefficients suggest was a typo
            return (A * x * x + B * y * y + C * x * y + D * x + E * y + F) > 0
        return [_run(test) for test in tests]
class Parabola(WeakLearner):
    """Non-linear test: inside/outside of a random parabola."""
    def __str__(self):
        return "Parabola"

    def generate_all(self, points, count):
        x_min = points.min(0)[0]
        y_min = points.min(0)[1]
        x_max = points.max(0)[0]
        y_max = points.max(0)[1]
        scale = abs(points.max() - points.min())
        tests = []
        # each test is (x0, y0, curvature p, axis the parabola opens along)
        tests.extend(zip(np.random.uniform(2 * x_min, 2 * x_max, count),
                         np.random.uniform(2 * y_min, 2 * y_max, count),
                         np.random.uniform(-scale, scale, count),
                         np.random.randint(0, 2, count)))
        return tests

    def run(self, points, tests):
        def _run(test):
            x = points[:, 0] - test[0]
            y = points[:, 1] - test[1]
            p, axis = test[2:]
            if axis == 0:
                return x * x < p * y
            else:
                return y * y < p * x
        return [_run(test) for test in tests]
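

if __name__ == "__main__":
    # Minimal usage sketch, not part of the original file: generate a random
    # cloud of 2D points, draw a handful of tests from each weak learner, and
    # evaluate them. Shapes and the (points, count) / (points, tests) calling
    # conventions simply follow the methods defined above.
    points = np.random.uniform(-1, 1, size=(100, 2))
    for learner in (AxisAligned(), Linear(), Conic(), Parabola()):
        tests = learner.generate_all(points, 10)
        responses = learner.run(points, tests)
        # each response is a boolean vector with one entry per point
        print(learner, len(responses), responses[0].shape)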