"""
Created on May 19, 2021
modules of Wide&Deep: Linear, DNN
@author: Ziyao Geng([email protected])
"""
import tensorflow as tf
from tensorflow.keras.layers import Dense, Layer, Dropout
from tensorflow.keras.regularizers import l2
class Linear(Layer):
def __init__(self, feature_length, w_reg=1e-6):
"""
Linear Part
:param feature_length: A scalar. The length of features.
:param w_reg: A scalar. The regularization coefficient of parameter w.
"""
super(Linear, self).__init__()
self.feature_length = feature_length
self.w_reg = w_reg
def build(self, input_shape):
self.w = self.add_weight(name="w",
shape=(self.feature_length, 1),
regularizer=l2(self.w_reg),
trainable=True)
def call(self, inputs, **kwargs):
result = tf.reduce_sum(tf.nn.embedding_lookup(self.w, inputs), axis=1) # (batch_size, 1)
return result
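

# Usage sketch for Linear (illustrative only; feature_length, the index
# values, and shapes below are assumptions, not part of the original module):
#   linear = Linear(feature_length=100)
#   idx = tf.constant([[1, 5, 42], [0, 7, 99]])  # (batch_size=2, fields=3) indices
#   logit = linear(idx)                          # (2, 1): sum of looked-up weights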


class DNN(Layer):
    def __init__(self, hidden_units, activation='relu', dropout=0.):
        """Deep Neural Network (the "deep" component of Wide&Deep).
        :param hidden_units: A list. Number of units in each hidden layer.
        :param activation: A string. Activation function of the DNN.
        :param dropout: A scalar. Dropout rate.
        """
        super(DNN, self).__init__()
        self.dnn_network = [Dense(units=unit, activation=activation) for unit in hidden_units]
        self.dropout = Dropout(dropout)

    def call(self, inputs, **kwargs):
        x = inputs
        for dnn in self.dnn_network:
            x = dnn(x)
        # Dropout is applied once, after the last hidden layer.
        x = self.dropout(x)
        return x
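

# Minimal Wide&Deep-style usage sketch. Everything below (feature_length,
# hidden_units, input shapes, index values) is an assumption for illustration;
# this is not the repository's actual model wiring.
if __name__ == '__main__':
    wide = Linear(feature_length=100)               # wide part over sparse feature indices
    deep = DNN(hidden_units=[64, 32], dropout=0.5)  # deep part over dense features
    out_layer = Dense(1)                            # projects the deep output to a logit

    sparse_inputs = tf.constant([[1, 5, 42], [0, 7, 99]])  # (2, 3) feature indices
    dense_inputs = tf.random.normal((2, 8))                # (2, 8) dense features

    wide_logit = wide(sparse_inputs)               # (2, 1)
    deep_logit = out_layer(deep(dense_inputs))     # (2, 1)
    prob = tf.nn.sigmoid(wide_logit + deep_logit)  # (2, 1) predicted probability
    print(prob.numpy())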