Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
Merge branch 'master' of GraphNeuralNetworks.jl into nn-lux
  • Loading branch information
rbSparky committed Aug 19, 2024
2 parents 70674a2 + 7f1a07a commit 17627f1
Showing 1 changed file with 2 additions and 13 deletions.
15 changes: 2 additions & 13 deletions GNNLux/src/layers/conv.jl
Original file line number Diff line number Diff line change
Expand Up @@ -666,25 +666,14 @@ function (l::NNConv)(g, x, edge_weight, ps, st)
return y, stnew
end

# Initialize the trainable parameters directly owned by an `NNConv` layer:
# a `(out_dims, in_dims)` weight matrix and, when `use_bias` is set, an
# `out_dims`-length bias vector, returned as a `NamedTuple`.
# NOTE(review): this only covers the layer's own weight/bias — the wrapped
# `l.nn`'s parameters are not initialized here; confirm Lux tracks them elsewhere.
function LuxCore.initialparameters(rng::AbstractRNG, l::NNConv)
    weight = l.init_weight(rng, l.out_dims, l.in_dims)
    # Guard clause: bias-free layers return only the weight.
    l.use_bias || return (; weight)
    bias = l.init_bias(rng, l.out_dims)
    return (; weight, bias)
end

# Total count of the parameters created by `initialparameters`: the
# `in_dims × out_dims` weight plus, when `use_bias` is set, an `out_dims` bias.
# NOTE(review): this count excludes any parameters of the wrapped `nn` —
# confirm that `nn`'s parameters are counted/tracked separately by Lux.
LuxCore.parameterlength(l::NNConv) = l.use_bias ? l.in_dims * l.out_dims + l.out_dims : l.in_dims * l.out_dims
# Output feature dimension of the layer, as a 1-tuple per the LuxCore convention.
LuxCore.outputsize(d::NNConv) = (d.out_dims,)

# Compact one-line display for `NNConv`: always shows the wrapped `nn`,
# then appends only the options that differ from their defaults.
# Fix: the span contained merge/diff residue — the stale `GINConv` method
# header and a duplicated `end` — which made it parse as a do-nothing outer
# function wrapping the real one; consolidated to the single `NNConv` method.
function Base.show(io::IO, l::NNConv)
    print(io, "NNConv($(l.nn)")
    # NOTE(review): `ϵ` looks like a leftover from the `GINConv` show method —
    # confirm `NNConv` actually defines an `ϵ` field before keeping this line.
    print(io, ", $(l.ϵ)")
    l.σ == identity || print(io, ", ", l.σ)
    l.use_bias || print(io, ", use_bias=false")
    l.add_self_loops || print(io, ", add_self_loops=false")
    !l.use_edge_weight || print(io, ", use_edge_weight=true")
    print(io, ")")
end

0 comments on commit 17627f1

Please sign in to comment.