-
Notifications
You must be signed in to change notification settings - Fork 36
/
Copy pathembedding.py
77 lines (68 loc) · 3.56 KB
/
embedding.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
import tensorflow as tf
import numpy as np
class MovieGenreEmbedding(tf.keras.Model):
    """Scores movie-genre relevance via a cosine similarity of learned embeddings.

    Each movie id and genre id is mapped to an `embedding_dim`-dimensional
    vector; the two vectors are combined with a normalized dot product and
    squashed through a sigmoid into a (0, 1) relevance score.
    """

    def __init__(self, len_movies, len_genres, embedding_dim):
        super().__init__()
        # Input adapter; expects a (movie, genre) id pair per example.
        self.m_g_input = tf.keras.layers.InputLayer(name='input_layer', input_shape=(2,))
        # One lookup table per id vocabulary.
        self.m_embedding = tf.keras.layers.Embedding(
            name='movie_embedding', input_dim=len_movies, output_dim=embedding_dim)
        self.g_embedding = tf.keras.layers.Embedding(
            name='genre_embedding', input_dim=len_genres, output_dim=embedding_dim)
        # normalize=True makes this a cosine similarity along axis 1.
        self.m_g_merge = tf.keras.layers.Dot(name='movie_genre_dot', normalize=True, axes=1)
        # Map the similarity to a probability-like score.
        self.m_g_fc = tf.keras.layers.Dense(1, activation='sigmoid')

    def call(self, x):
        """Return a sigmoid relevance score for the (movie, genre) pair(s) in x."""
        x = self.m_g_input(x)
        # NOTE(review): x[0]/x[1] index the LEADING axis of x — this assumes
        # the input arrives as a (movie_ids, genre_ids) pair, not as rows of a
        # (batch, 2) tensor; confirm against the training-loop callers.
        movie_vec = self.m_embedding(x[0])
        genre_vec = self.g_embedding(x[1])
        similarity = self.m_g_merge([movie_vec, genre_vec])
        return self.m_g_fc(similarity)
# class UserMovieEmbedding(tf.keras.Model):
# def __init__(self, len_users, embedding_dim):
# super(UserMovieEmbedding, self).__init__()
# self.m_u_input = tf.keras.layers.InputLayer(name='input_layer', input_shape=(2,))
# # embedding
# self.u_embedding = tf.keras.layers.Embedding(name='user_embedding', input_dim=len_users, output_dim=embedding_dim)
# # dot product
# self.m_u_merge = tf.keras.layers.Dot(name='movie_user_dot', normalize=False, axes=1)
# # output
# self.m_u_fc = tf.keras.layers.Dense(1, activation='sigmoid')
# def call(self, x):
# x = self.m_u_input(x)
# uemb = self.u_embedding(x[0])
# m_u = self.m_u_merge([x[1], uemb])
# return self.m_u_fc(m_u)
class UserMovieEmbedding(tf.keras.Model):
    """Scores user-movie affinity from an (unnormalized) dot product of embeddings.

    User and movie ids are each embedded into `embedding_dim` dimensions; their
    dot product is passed through a single sigmoid unit to yield a (0, 1) score.
    """

    def __init__(self, len_users, len_movies, embedding_dim):
        super().__init__()
        # Input adapter; expects a (user, movie) id pair per example.
        self.m_u_input = tf.keras.layers.InputLayer(name='input_layer', input_shape=(2,))
        # One lookup table per id vocabulary.
        self.u_embedding = tf.keras.layers.Embedding(
            name='user_embedding', input_dim=len_users, output_dim=embedding_dim)
        self.m_embedding = tf.keras.layers.Embedding(
            name='movie_embedding', input_dim=len_movies, output_dim=embedding_dim)
        # Plain dot product (normalize=False) along axis 1 — unlike
        # MovieGenreEmbedding, magnitudes of the embeddings matter here.
        self.m_u_merge = tf.keras.layers.Dot(name='movie_user_dot', normalize=False, axes=1)
        # Map the raw score to a probability-like output.
        self.m_u_fc = tf.keras.layers.Dense(1, activation='sigmoid')

    def call(self, x):
        """Return a sigmoid affinity score for the (user, movie) pair(s) in x."""
        x = self.m_u_input(x)
        # NOTE(review): x[0]/x[1] index the LEADING axis of x — this assumes
        # the input arrives as a (user_ids, movie_ids) pair, not as rows of a
        # (batch, 2) tensor; confirm against the training-loop callers.
        user_vec = self.u_embedding(x[0])
        movie_vec = self.m_embedding(x[1])
        affinity = self.m_u_merge([movie_vec, user_vec])
        return self.m_u_fc(affinity)
# class UserMovieEmbedding(tf.keras.Model):
# def __init__(self, len_users, len_movies, embedding_dim):
# super(UserMovieEmbedding, self).__init__()
# self.m_u_input = tf.keras.layers.InputLayer(name='input_layer', input_shape=(2,))
# # embedding
# self.u_embedding = tf.keras.layers.Embedding(name='user_embedding', input_dim=len_users, output_dim=embedding_dim)
# self.m_embedding = tf.keras.layers.Embedding(name='movie_embedding', input_dim=len_movies, output_dim=embedding_dim)
# # dot product
# self.m_u_concat = tf.keras.layers.Concatenate(name='movie_user_concat', axis=1)
# # output
# self.m_u_fc = tf.keras.layers.Dense(1, activation='sigmoid')
# def call(self, x):
# x = self.m_u_input(x)
# uemb = self.u_embedding(x[0])
# memb = self.m_embedding(x[1])
# m_u = self.m_u_concat([memb, uemb])
# return self.m_u_fc(m_u)