# train.py
"""
Created on Sat Mar 17 11:01:27 2018
@author: leo
"""
import warnings
warnings.filterwarnings("ignore")

import pickle

import numpy as np
# import matplotlib as mpl
# mpl.use('Agg')  # uncomment to render plots without a display (e.g. on a cluster)
import matplotlib.pyplot as plt
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Flatten
from keras.layers import MaxPooling2D
from keras.layers.convolutional import Conv2D
from keras import regularizers
from keras.optimizers import Adadelta
from sklearn.utils import shuffle

# Build the CNN model
def build_model(input_shape, conv_window_size, num_filters, reg, dropout):
    model = Sequential()
    # A 2D convolution that learns num_filters word-group filters of size
    # conv_window_size (the kernel spans the full embedding width):
    model.add(Conv2D(input_shape=input_shape,
                     filters=num_filters,
                     kernel_size=conv_window_size,
                     padding="valid",
                     activation="relu",
                     strides=1,
                     data_format='channels_first'))
    # MaxPooling2D defaults to channels_last, so on the (filters, positions, 1)
    # conv output this pools across the filter axis at each window position.
    model.add(MaxPooling2D(pool_size=(num_filters, 1)))
    model.add(Flatten())
    model.add(Dropout(dropout))
    # A single sigmoid unit (softmax over one unit is constant 1.0); an l2-norm
    # penalty on the output weights is imposed during training as well.
    model.add(Dense(1, activation='sigmoid', kernel_regularizer=regularizers.l2(reg)))
    model.compile(loss='binary_crossentropy',
                  optimizer=Adadelta(),
                  metrics=['mae'])
    return model
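
# Quick shape sanity check (a hedged example: the 50-token sentence length is
# hypothetical; the (3, 300) window and 400 filters match the values in main()):
def _check_shapes():
    # Conv2D (channels_first, valid) maps (1, 50, 300) -> (400, 48, 1); the
    # channels_last max-pool with pool_size=(400, 1) then pools across the 400
    # filters, giving (1, 48, 1), and Flatten leaves 48 features.
    model = build_model((1, 50, 300), (3, 300), 400, 0.01, 0.5)
    model.summary()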

def train(model, x_train, y_train, val_train_ratio=0.2, epochs=1000, batch_size=128):
    # validation_split holds out the last fraction of the data, so the inputs
    # are shuffled beforehand in load_data(); shuffle=False here keeps the
    # per-epoch ordering fixed.
    history = model.fit(x_train, y_train,
                        batch_size=batch_size,
                        epochs=epochs,
                        validation_split=val_train_ratio,
                        shuffle=False,
                        verbose=1)
    return history
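
# Optional variant (an assumption, not part of the original pipeline): the same
# fit call with early stopping on validation loss via keras.callbacks.
def train_with_early_stopping(model, x_train, y_train, val_train_ratio=0.2,
                              epochs=1000, batch_size=128, patience=10):
    from keras.callbacks import EarlyStopping
    early_stopping = EarlyStopping(monitor='val_loss', patience=patience)
    return model.fit(x_train, y_train,
                     batch_size=batch_size,
                     epochs=epochs,
                     validation_split=val_train_ratio,
                     shuffle=False,
                     verbose=1,
                     callbacks=[early_stopping])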

def load_data():
    print("loading pickle files...")
    chunks = []
    for i in range(1, 9):
        path = "/global/scratch/alex_vlissidis/wordEmbeddingsToSaliency{}.pickle".format(i)
        with open(path, "rb") as f:
            chunks.append(pickle.load(f))
    print("concatenating data...")
    data = np.concatenate(chunks, axis=0)
    print("extracting x and y...")
    # Samples are interleaved: even indices hold embeddings, odd indices labels.
    x = data[::2]
    y = data[1::2]
    del data
    print("converting x to np tensor...")
    x = np.dstack(x)
    x = np.rollaxis(x, -1)
    x = np.expand_dims(x, axis=1)  # add a channels axis for Conv2D
    print("removing -1s...")
    mask = y == -1  # -1 marks unlabeled samples
    x = x[~mask, :]
    y = y[~mask]
    print("data loaded.")
    x, y = shuffle(x, y)
    return x, y
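
# Assumed pickle layout (inferred from the [::2]/[1::2] split above):
#   data = [embedding_0, saliency_0, embedding_1, saliency_1, ...]
# where each embedding is a (sentence_length, 300) matrix and each saliency is
# a scalar label, with -1 marking samples to drop.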

def main():
    # Model hyperparameters
    conv_window_size = (3, 300)
    num_filters = 400
    reg = 0.01
    dropout = 0.5

    # Training parameters
    epochs = 10
    batch_size = 256
    val_train_ratio = 0.2

    x_train, y_train = load_data()
    print("training data:", x_train.shape, y_train.shape)
    model = build_model((1, x_train.shape[2], x_train.shape[3]),
                        conv_window_size, num_filters, reg, dropout)
    history = train(model, x_train, y_train, val_train_ratio, epochs, batch_size)

    print("Saving model...")
    model.save('model-softmax.h5')

    print("Plotting...")
    f, (ax1, ax2) = plt.subplots(2, 1)
    ax1.plot(range(1, epochs + 1), history.history['val_mean_absolute_error'], 'tab:blue', label="validation MAE")
    ax1.plot(range(1, epochs + 1), history.history['mean_absolute_error'], 'tab:red', label="training MAE")
    ax2.plot(range(1, epochs + 1), history.history['loss'], 'tab:orange', label="loss")
    ax2.plot(range(1, epochs + 1), history.history['val_loss'], 'tab:green', label="validation loss")
    ax1.legend()
    ax2.legend()
    f.savefig('training-softmax.png', dpi=300)
    plt.show()
    print("Done.")


if __name__ == "__main__":
    main()