SingleLayerNeuralNetwork.py
# coding: utf-8
# In[1]:
import tensorflow as tf
import numpy as np
import os
import pandas as pd
import matplotlib.pyplot as plt
# Load the 800-dimensional feature vectors and their labels.
bbs = np.loadtxt('bbs-train.txt')
label = np.loadtxt('label-train.txt')
label = label[:, 1]

# One-hot encode the binary labels: 0 -> [1, 0], 1 -> [0, 1].
label_pro = np.empty([len(label), 2])
for i in range(len(label)):
    if label[i] == 0:
        label_pro[i] = [1, 0]
    elif label[i] == 1:
        label_pro[i] = [0, 1]
# train_test_split lives in sklearn.model_selection (sklearn.cross_validation has been removed).
from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(bbs, label_pro, test_size=0.2, random_state=0)
# Placeholders for the 800-dimensional inputs and the 2-class one-hot targets.
x = tf.placeholder("float", [None, 800])
y_ = tf.placeholder("float", [None, 2])
# In[2]:
# Single-layer model: one weight matrix and one bias vector, randomly initialised.
W = tf.Variable(tf.random_normal([800, 2]))
b = tf.Variable(tf.random_normal([2]))
logits = tf.matmul(x, W) + b
y = tf.nn.sigmoid(logits)
#cross_entropy = -tf.reduce_sum(y_*tf.log(y+0.00001))
# softmax_cross_entropy_with_logits expects unscaled logits, not probabilities.
cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=logits))
#train_step = tf.train.RMSPropOptimizer(2e-5).minimize(cross_entropy)
#train_step = tf.train.ProximalAdagradOptimizer(2e-5).minimize(cross_entropy)
#train_step = tf.train.AdamOptimizer(2e-5).minimize(cross_entropy)
train_step = tf.train.GradientDescentOptimizer(2e-5).minimize(cross_entropy)
correct_prediction = tf.equal(tf.argmax(y,1), tf.argmax(y_,1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
sess = tf.Session()
sess.run(tf.global_variables_initializer())
error = []
_result = []
for j in range(800):
    # Each epoch: 12 mini-batches of 2000 examples sampled with replacement.
    for i in range(12):
        random_select = np.random.randint(0, len(y_train), 2000)
        xs = [x_train[k] for k in random_select]
        ys = [y_train[k] for k in random_select]
        # Take one gradient step on the sampled mini-batch.
        sess.run(train_step, feed_dict={x: xs, y_: ys})
    # Record accuracy and loss on the full training set once per epoch.
    train_accuracy, loss = sess.run([accuracy, cross_entropy],
                                    feed_dict={x: x_train, y_: y_train})
    error.append(loss)
    if j % 50 == 0:
        print("step %d, training accuracy=%g" % (j, train_accuracy),
              "test accuracy=%g" % sess.run(accuracy, feed_dict={x: x_test, y_: y_test}))
        print("loss=", loss)
train_accuracy, loss = sess.run([accuracy, cross_entropy], feed_dict={x: x_train, y_: y_train})
# In[ ]:
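# Illustrative sketch, not part of the original script: plot the per-epoch
# training loss collected in `error`, using the matplotlib import above.
# Axis labels and title are assumptions chosen here for readability.
plt.plot(error)
plt.xlabel('epoch')
plt.ylabel('cross-entropy loss')
plt.title('Training loss')
plt.show()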