# coding: utf-8

# In[1]:

# package imports
import numpy as np
import pandas as pd
import tensorflow as tf
# sklearn.cross_validation was removed in scikit-learn 0.20; use model_selection instead
from sklearn.model_selection import train_test_split
# load the training features and labels
bbs = np.loadtxt('bbs-train.txt')
label = np.loadtxt('label-train.txt')
label = label[:, 1]

# convert the 0/1 labels to one-hot vectors
label_pro = np.empty([len(label), 2])
for i in range(len(label)):
    if label[i] == 0:
        label_pro[i] = [1, 0]
    elif label[i] == 1:
        label_pro[i] = [0, 1]
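# Note (an addition, assuming the labels are exactly 0/1): an equivalent
# vectorised one-hot encoding would be
#     label_pro = np.eye(2)[label.astype(int)]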
# hold out 20% of the examples for testing
x_train, x_test, y_train, y_test = train_test_split(bbs, label_pro, test_size=0.2, random_state=0)

# placeholders: 800-dimensional inputs, 2-class one-hot targets
x = tf.placeholder("float", [None, 800])
y_ = tf.placeholder("float", [None, 2])
# In[2]:

# weight and bias setup for an 800 -> 200 -> 2 network
W = tf.Variable(tf.random_normal([800, 200]))
b = tf.Variable(tf.random_normal([200]))
W2 = tf.Variable(tf.random_normal([200, 2]))
b2 = tf.Variable(tf.random_normal([2]))
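# Note (an addition, not in the original): tf.random_normal defaults to
# stddev=1.0, which can saturate the sigmoid units given 800-dimensional
# inputs; a smaller initial scale is a common alternative, e.g.
#     W = tf.Variable(tf.random_normal([800, 200], stddev=0.1))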
# forward pass: sigmoid hidden layer, raw logits at the output.
# softmax_cross_entropy_with_logits applies the softmax itself, so the
# output layer must not be passed through a sigmoid first.
layer1 = tf.nn.sigmoid(tf.matmul(x, W) + b)
logits = tf.matmul(layer1, W2) + b2
y = tf.nn.softmax(logits)
#cross_entropy = -tf.reduce_sum(y_*tf.log(y+0.00001))
cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=logits))
#train_step = tf.train.RMSPropOptimizer(2e-5).minimize(cross_entropy)
#train_step = tf.train.GradientDescentOptimizer(2e-5).minimize(cross_entropy)
train_step = tf.train.ProximalAdagradOptimizer(2e-5).minimize(cross_entropy)
#train_step = tf.train.AdamOptimizer(2e-5).minimize(cross_entropy)
# accuracy: fraction of examples whose predicted class matches the label
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
sess = tf.Session()
sess.run(tf.global_variables_initializer())
error = []
for j in range(800):
    for i in range(12):
        # sample a random mini-batch of 2000 training examples
        random_select = np.random.randint(0, len(y_train), 2000)
        xs = [x_train[k] for k in random_select]
        ys = [y_train[k] for k in random_select]
        # train on the sampled mini-batch
        sess.run(train_step, feed_dict={x: xs, y_: ys})
    # record the loss over the full training set once per epoch
    train_accuracy, loss = sess.run([accuracy, cross_entropy],
                                    feed_dict={x: x_train, y_: y_train})
    error.append(loss)
    if j % 50 == 0:
        print("step %d, training accuracy =%g" % (j, train_accuracy),
              "test accuracy =%g" % sess.run(accuracy, feed_dict={x: x_test, y_: y_test}))
        print("loss = ", loss)

train_accuracy, loss = sess.run([accuracy, cross_entropy], feed_dict={x: x_train, y_: y_train})
# In[ ]:
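# Optional sketch (an addition, not part of the original script; assumes
# matplotlib is installed): plot the per-epoch loss recorded in `error`
# above to check that training converged.
import matplotlib.pyplot as plt

plt.plot(error)
plt.xlabel('epoch')
plt.ylabel('cross-entropy loss')
plt.title('training loss')
plt.show()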