class HNHNConv(nn.Module):
    r"""The HNHN convolution layer proposed in `HNHN: Hypergraph Networks with Hyperedge Neurons <https://arxiv.org/pdf/2006.12278.pdf>`_ paper (ICML 2020).

    Parameters:
        ``in_channels`` (``int``): :math:`C_{in}` is the number of input channels.
        ``out_channels`` (``int``): :math:`C_{out}` is the number of output channels.
        ``bias`` (``bool``): If set to ``False``, the layer will not learn the bias parameter. Defaults to ``True``.
        ``use_bn`` (``bool``): If set to ``True``, the layer will use batch normalization. Defaults to ``False``.
        ``drop_rate`` (``float``): If set to a positive number, the layer will use dropout. Defaults to ``0.5``.
        ``is_last`` (``bool``): If set to ``True``, the layer will not apply the final activation and dropout functions. Defaults to ``False``.
    """

    def __init__(
        self,
        in_channels: int,
        out_channels: int,
        bias: bool = True,
        use_bn: bool = False,
        drop_rate: float = 0.5,
        is_last: bool = False,
    ):
        super().__init__()
        self.is_last = is_last
        self.bn = nn.BatchNorm1d(out_channels) if use_bn else None
        self.act = nn.ReLU(inplace=True)
        self.drop = nn.Dropout(drop_rate)
        self.theta_v2e = nn.Linear(in_channels, out_channels, bias=bias)
        self.theta_e2v = nn.Linear(out_channels, out_channels, bias=bias)
    def forward(self, X: torch.Tensor, hg: Hypergraph) -> torch.Tensor:
        r"""The forward function.

        Parameters:
            X (``torch.Tensor``): Input vertex feature matrix. Size :math:`(|\mathcal{V}|, C_{in})`.
            hg (``eg.Hypergraph``): The hypergraph structure that contains :math:`|\mathcal{V}|` vertices.
        """
        # v -> e: transform vertex features and aggregate them onto hyperedges
        X = self.theta_v2e(X)
        if self.bn is not None:
            X = self.bn(X)
        Y = self.act(hg.v2e(X, aggr="mean"))
        # e -> v: transform hyperedge features and aggregate them back onto vertices
        Y = self.theta_e2v(Y)
        X = hg.e2v(Y, aggr="mean")
        if not self.is_last:
            X = self.drop(self.act(X))
        return X
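A minimal usage sketch, with ``HNHNConv`` defined as above. It assumes ``easygraph`` is imported as ``eg`` and that the hypergraph is built with a ``Hypergraph(num_v, e_list)``-style constructor (the ``eg.Hypergraph`` referenced in the docstring); the vertex count, hyperedge list, and feature dimensions are made-up toy values.

# Assumed imports and toy data for illustration only.
import torch
import easygraph as eg

# Hypergraph with 5 vertices and 3 hyperedges, each hyperedge given as a list of
# vertex indices (hypothetical example; constructor form is assumed).
hg = eg.Hypergraph(5, [[0, 1, 2], [1, 3], [2, 3, 4]])
X = torch.rand(5, 8)  # (|V|, C_in) vertex feature matrix

conv = HNHNConv(in_channels=8, out_channels=16)
Y = conv(X, hg)       # vertex -> hyperedge -> vertex message passing
print(Y.shape)        # torch.Size([5, 16]), i.e. (|V|, C_out)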