Source code for easygraph.model.hypergraphs.hnhn
import torch
import torch.nn as nn

import easygraph as eg  # referenced by the ``eg.Hypergraph`` type hint below
from easygraph.nn import HNHNConv


class HNHN(nn.Module):
    r"""The HNHN model proposed in the `HNHN: Hypergraph Networks with Hyperedge Neurons <https://arxiv.org/pdf/2006.12278.pdf>`_ paper (ICML 2020).

    Args:
        ``in_channels`` (``int``): :math:`C_{in}` is the number of input channels.
        ``hid_channels`` (``int``): :math:`C_{hid}` is the number of hidden channels.
        ``num_classes`` (``int``): The number of classes of the classification task.
        ``use_bn`` (``bool``): If set to ``True``, use batch normalization. Defaults to ``False``.
        ``drop_rate`` (``float``, optional): Dropout ratio. Defaults to ``0.5``.
    """

    def __init__(
        self,
        in_channels: int,
        hid_channels: int,
        num_classes: int,
        use_bn: bool = False,
        drop_rate: float = 0.5,
    ) -> None:
        super().__init__()
        self.layers = nn.ModuleList()
        # Hidden HNHN convolution layer: input features -> hidden features.
        self.layers.append(
            HNHNConv(in_channels, hid_channels, use_bn=use_bn, drop_rate=drop_rate)
        )
        # Output HNHN convolution layer: hidden features -> class logits.
        self.layers.append(
            HNHNConv(hid_channels, num_classes, use_bn=use_bn, is_last=True)
        )

    def forward(self, X: torch.Tensor, hg: "eg.Hypergraph") -> torch.Tensor:
        r"""The forward function.

        Args:
            ``X`` (``torch.Tensor``): Input vertex feature matrix. Size :math:`(N, C_{in})`.
            ``hg`` (``eg.Hypergraph``): The hypergraph structure that contains :math:`N` vertices.
        """
        # Apply the stacked HNHN convolution layers in sequence.
        for layer in self.layers:
            X = layer(X, hg)
        return X
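

# ---------------------------------------------------------------------------
# Usage sketch (not part of the library source): a minimal, hedged example of
# vertex classification with HNHN. It assumes ``eg.Hypergraph`` can be
# constructed from a vertex count and a hyperedge list; the vertices,
# hyperedges, and random features below are placeholders, not real data.
if __name__ == "__main__":
    num_vertices, in_channels, hid_channels, num_classes = 6, 8, 16, 3

    # Hypothetical hypergraph: 6 vertices covered by 3 hyperedges.
    hg = eg.Hypergraph(num_vertices, [[0, 1, 2], [2, 3], [3, 4, 5]])
    X = torch.rand(num_vertices, in_channels)  # random vertex features

    model = HNHN(in_channels, hid_channels, num_classes, use_bn=True)
    logits = model(X, hg)  # expected shape: (num_vertices, num_classes)
    print(logits.shape)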