-
Notifications
You must be signed in to change notification settings - Fork 8
/
logReg_demo.py
46 lines (36 loc) · 974 Bytes
/
logReg_demo.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
from optim import *
import matplotlib.pyplot as plt
from Activation import sigmoid
import numpy as np
from loadData import Data
from functools import partial
from logReg import *
# Demo: one-vs-all logistic regression on a 4-class function of three
# binary variables.  The input -> label mapping is given literally by
# the truth table in pat1 ([features], [class label]).
pat1 = [[[0, 0, 0], [0]],
        [[0, 0, 1], [1]],
        [[0, 1, 0], [1]],
        [[0, 1, 1], [1]],
        [[1, 0, 0], [2]],
        [[1, 0, 1], [3]],
        [[1, 1, 0], [3]],
        [[1, 1, 1], [3]],
        ]
d1 = Data()
d1.loadList(pat1, numClasses=4)
#print(d1.y)
act = sigmoid().h  # our activation function is sigmoid

# Train one binary logistic-regression classifier per class with
# gradient descent; J collects the cost history for each classifier.
model, J = trainOneVsAllGD(d1, act, epochs=5000, lr=0.25)
#print(d1.y)

# Plot every classifier's cost curve (transpose so epochs run along x).
plt.plot(np.transpose(J))
plt.show()

# Predicted class for each training example.
print(predictMultiple(model, d1.X, act))

# Alternative single-classifier demo, kept for reference:
# d1.addBiasRow()
# theta_init = np.matrix(np.zeros((d1.n, 1)))
#
# cost = partial(logRegCost, data=d1, theta=theta_init, \
#                regLambda=0.001, activation=act)
# J, theta = gradDesc(cost, theta_init, 1000, 2.5)
# print('model is :', list(np.transpose(theta).flat))
# plt.plot(J)
# plt.show()
# testLogReg(theta, d1, act)