Skip to content

Commit

Permalink
Fix bug: the activation argument was being ignored (hard-coded ReLU was used instead)
Browse files Browse the repository at this point in the history
  • Loading branch information
borauyar committed May 14, 2024
1 parent d150f8d commit 880b9c0
Showing 1 changed file with 5 additions and 5 deletions.
10 changes: 5 additions & 5 deletions flexynesis/modules.py
Original file line number Diff line number Diff line change
Expand Up @@ -191,7 +191,7 @@ def forward(self, x):

class GNNs(nn.Module):
def __init__(self, input_dim, hidden_dim, output_dim,
conv='GC', act = None):
conv='GC', act = 'relu'):
super().__init__()

act_options = {
Expand All @@ -218,16 +218,16 @@ def __init__(self, input_dim, hidden_dim, output_dim,

self.conv = conv_options[conv]
self.layer_1 = self.conv(input_dim, hidden_dim)
self.relu_1 = nn.ReLU()
self.act_1 = self.activation
self.layer_2 = self.conv(hidden_dim, output_dim)
self.relu_2 = nn.ReLU()
self.act_2 = self.activation
self.aggregation = aggr.SumAggregation()

def forward(self, x, edge_index, batch):
x = self.layer_1(x, edge_index)
x = self.relu_1(x)
x = self.act_1(x)
x = self.layer_2(x, edge_index)
x = self.relu_2(x)
x = self.act_2(x)
x = self.aggregation(x, batch)
return x

Expand Down

0 comments on commit 880b9c0

Please sign in to comment.