TAGConv layer from [Topology Adaptive Graph Convolutional Networks](https://arxiv.org/pdf/1710.10370.pdf).
This layer extends the idea of graph convolutions by applying filters that adapt to the topology of the data.
It performs the operation:

```math
H^{K} = {\sum}_{k=0}^K (D^{-1/2} A D^{-1/2})^{k} X {\Theta}_{k}
```

where `A` is the adjacency matrix of the graph, `D` is the degree matrix, `X` is the input feature matrix, and ``{\Theta}_{k}`` is a unique weight matrix for each hop `k`.

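For example, with `K = 2` the sum above unrolls into one weighted term per hop (a restatement of the formula, just to make the notation concrete):

```math
H^{2} = X {\Theta}_{0} + (D^{-1/2} A D^{-1/2}) X {\Theta}_{1} + (D^{-1/2} A D^{-1/2})^{2} X {\Theta}_{2}
```
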
# Arguments

- `in`: Number of input features.
- `out`: Number of output features.
- `k`: Maximum number of hops to consider. Default is `3`.
- `bias`: Whether to include a learnable bias term. Default is `true`.
- `init`: Initialization function for the weights. Default is `glorot_uniform`.
- `add_self_loops`: Whether to add self-loops to the adjacency matrix. Default is `true`.
- `use_edge_weight`: If `true`, edge weights are considered in the computation (if available). Default is `false`.

# Examples

```julia
# Example graph data
s = [1, 1, 2, 3]
t = [2, 3, 1, 1]
g = GNNGraph(s, t) # Create a graph
x = randn(Float32, 3, g.num_nodes) # Random features for each node

# Create a TAGConv layer (k is a positional argument)
l = TAGConv(3 => 5, 3; add_self_loops=true)

# Apply the TAGConv layer
y = l(g, x) # Output size: 5 × num_nodes
```
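Edge weights can also be taken into account, either stored in the graph or passed explicitly at call time. The snippet below is illustrative only; the weight values are arbitrary:

```julia
w = rand(Float32, g.num_edges)              # one weight per edge (arbitrary values)
lw = TAGConv(3 => 5; use_edge_weight=true)  # consider edge weights during propagation
y = lw(g, x, w)                             # explicit per-edge weights for this call
```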
"""
struct TAGConv{A <: AbstractMatrix, B} <: GNNLayer
    weight::A
    bias::B
    k::Int
    add_self_loops::Bool
    use_edge_weight::Bool
end

@functor TAGConv

function TAGConv(ch::Pair{Int, Int}, k = 3;
                 init = glorot_uniform,
                 bias::Bool = true,
                 add_self_loops = true,
                 use_edge_weight = false)
    in, out = ch
    W = init(out, in)
    b = bias ? Flux.create_bias(W, true, out) : false
    TAGConv(W, b, k, add_self_loops, use_edge_weight)
end
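# Note: with the constructor above, a call such as `TAGConv(4 => 8, 2)` stores a
# `weight` of size (out, in) = (8, 4), a length-8 zero bias vector (via
# `Flux.create_bias`), and the hop count `k = 2`.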
function (l::TAGConv)(g::GNNGraph, x::AbstractMatrix{T},
                      edge_weight::EW = nothing) where
    {T, EW <: Union{Nothing, AbstractVector}}
    @assert !(g isa GNNGraph{<:ADJMAT_T} && edge_weight !== nothing) "Providing external edge_weight is not yet supported for adjacency matrix graphs"

    if edge_weight !== nothing
        @assert length(edge_weight) == g.num_edges "Wrong number of edge weights (expected $(g.num_edges) but given $(length(edge_weight)))"