"""
Created on April 25, 2021
modules of DeepFM: FM, DNN
@author: Ziyao Geng([email protected])
"""
import tensorflow as tf
from tensorflow.keras.regularizers import l2
from tensorflow.keras.layers import Dropout, Dense, Layer


class FM(Layer):
    """
    Wide part
    """
    def __init__(self, feature_length, w_reg=1e-6):
        """
        Factorization Machine.
        In DeepFM, only the first-order term and the second-order (pairwise
        feature interaction) term are included.
        :param feature_length: A scalar. The total number of features.
        :param w_reg: A scalar. The regularization coefficient of parameter w.
        """
        super(FM, self).__init__()
        self.feature_length = feature_length
        self.w_reg = w_reg

    def build(self, input_shape):
        # First-order weights, one per feature index.
        self.w = self.add_weight(name='w', shape=(self.feature_length, 1),
                                 initializer='random_normal',
                                 regularizer=l2(self.w_reg),
                                 trainable=True)

    def call(self, inputs, **kwargs):
        """
        :param inputs: A dict with two entries:
            'sparse_inputs': a 2D tensor of feature indices with shape `(batch_size, fields)`;
            'embed_inputs': a 3D tensor of embeddings with shape `(batch_size, fields, embed_dim)`.
        :return: A 2D tensor with shape `(batch_size, 1)`.
        """
        sparse_inputs, embed_inputs = inputs['sparse_inputs'], inputs['embed_inputs']
        # first order: sum of the weights of the active features
        first_order = tf.reduce_sum(tf.nn.embedding_lookup(self.w, sparse_inputs), axis=1)  # (batch_size, 1)
        # second order: 0.5 * sum((sum of embeddings)^2 - sum of squared embeddings)
        square_sum = tf.square(tf.reduce_sum(embed_inputs, axis=1, keepdims=True))  # (batch_size, 1, embed_dim)
        sum_square = tf.reduce_sum(tf.square(embed_inputs), axis=1, keepdims=True)  # (batch_size, 1, embed_dim)
        second_order = 0.5 * tf.reduce_sum(square_sum - sum_square, axis=2)  # (batch_size, 1)
        return first_order + second_order
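
# A minimal usage sketch for FM (not part of the original module); the feature
# length, field count, and embedding dimension below are illustrative assumptions:
#
#   fm = FM(feature_length=100)
#   inputs = {
#       'sparse_inputs': tf.constant([[3, 57, 98]]),     # (batch_size=1, fields=3) feature indices
#       'embed_inputs': tf.random.normal((1, 3, 8))      # (batch_size=1, fields=3, embed_dim=8)
#   }
#   logit = fm(inputs)                                   # -> shape (1, 1)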


class DNN(Layer):
    """
    Deep part
    """
    def __init__(self, hidden_units, activation='relu', dnn_dropout=0.):
        """
        DNN part.
        :param hidden_units: A list like `[unit1, unit2, ...]`. The number of units in each hidden layer.
        :param activation: A string. Activation function of the hidden layers.
        :param dnn_dropout: A scalar. Dropout rate applied after the last hidden layer.
        """
        super(DNN, self).__init__()
        self.dnn_network = [Dense(units=unit, activation=activation) for unit in hidden_units]
        self.dropout = Dropout(dnn_dropout)

    def call(self, inputs, **kwargs):
        x = inputs
        # Pass the input through the stack of fully connected layers.
        for dnn in self.dnn_network:
            x = dnn(x)
        x = self.dropout(x)
        return x
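

# A minimal smoke test (an illustrative sketch, not from the original file);
# the shapes and hyperparameters below are assumptions chosen for demonstration.
if __name__ == '__main__':
    batch_size, fields, embed_dim, feature_length = 4, 3, 8, 100
    # Random integer feature indices and dense embeddings stand in for real data.
    sparse_inputs = tf.random.uniform((batch_size, fields), maxval=feature_length, dtype=tf.int32)
    embed_inputs = tf.random.normal((batch_size, fields, embed_dim))

    fm = FM(feature_length=feature_length)
    fm_out = fm({'sparse_inputs': sparse_inputs, 'embed_inputs': embed_inputs})
    print('FM output shape:', fm_out.shape)  # (4, 1)

    dnn = DNN(hidden_units=[64, 32], dnn_dropout=0.2)
    dnn_out = dnn(tf.reshape(embed_inputs, (batch_size, -1)))
    print('DNN output shape:', dnn_out.shape)  # (4, 32)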