Merge pull request #106 from BUPT-GAMMA/docs
[Docs] fix docs bugs
Zhanghyi committed Jun 26, 2022
2 parents 78c8684 + 02639c8 commit ad77ad6
Showing 4 changed files with 22 additions and 17 deletions.
18 changes: 9 additions & 9 deletions openhgnn/layers/HeteroLinear.py
@@ -68,15 +68,15 @@ class HeteroLinearLayer(nn.Module):
Examples
----------
->>>import torch as th
->>>linear_dict = {}
->>>linear_dict['author'] = [110, 64]
->>>linear_dict['paper'] = [128,64]
->>>h_dict = {}
->>>h_dict['author'] = th.tensor(10, 110)
->>>h_dict['paper'] = th.tensor(5, 128)
->>>layer = HeteroLinearLayer(linear_dict)
->>>out_dict = layer(h_dict)
+>>> import torch as th
+>>> linear_dict = {}
+>>> linear_dict['author'] = [110, 64]
+>>> linear_dict['paper'] = [128,64]
+>>> h_dict = {}
+>>> h_dict['author'] = th.tensor(10, 110)
+>>> h_dict['paper'] = th.tensor(5, 128)
+>>> layer = HeteroLinearLayer(linear_dict)
+>>> out_dict = layer(h_dict)
"""
def __init__(self, linear_dict, act=None, dropout=0.0, has_l2norm=True, has_bn=True, **kwargs):
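Note that even the reformatted doctest above would not run as written: th.tensor(10, 110) is not a valid call, since torch.tensor expects data rather than a shape. Below is a minimal runnable sketch of the same example, under the assumption that random feature matrices are intended and that the layer is importable from openhgnn.layers.HeteroLinear; names and sizes simply mirror the docstring.

import torch as th
from openhgnn.layers.HeteroLinear import HeteroLinearLayer  # assumed import path

# Per-type [in_dim, out_dim] pairs, as in the docstring
linear_dict = {'author': [110, 64], 'paper': [128, 64]}

# Assumption: random features stand in for th.tensor(10, 110) / th.tensor(5, 128)
h_dict = {
    'author': th.randn(10, 110),   # 10 author nodes, 110-dim input features
    'paper': th.randn(5, 128),     # 5 paper nodes, 128-dim input features
}

layer = HeteroLinearLayer(linear_dict)
out_dict = layer(h_dict)           # expected: 64-dim output per node type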
2 changes: 2 additions & 0 deletions openhgnn/layers/macro_layer/ATTConv.py
@@ -74,6 +74,8 @@ def forward(self, hg, h_neigh, h_center):

class MacroConv(nn.Module):
    """
+    MacroConv
+
    Parameters
    ----------
    in_feats : int
16 changes: 11 additions & 5 deletions openhgnn/layers/micro_layer/HGConv.py
@@ -49,14 +49,20 @@ def forward(self,
src_node_transformation_weight: nn.Parameter,
src_nodes_attention_weight: nn.Parameter):
r"""Compute graph attention network layer.
Parameters
----------
-graph : specific relational DGLHeteroGraph
-feat : pair of torch.Tensor
+graph:
+    specific relational DGLHeteroGraph
+feat: pair of torch.Tensor
    The pair contains two tensors of shape (N_{in}, D_{in_{src}})` and (N_{out}, D_{in_{dst}}).
-dst_node_transformation_weight: Parameter (input_dst_dim, n_heads * hidden_dim)
-src_node_transformation_weight: Parameter (input_src_dim, n_heads * hidden_dim)
-src_nodes_attention_weight: Parameter (n_heads, 2 * hidden_dim)
+dst_node_transformation_weight:
+    Parameter (input_dst_dim, n_heads * hidden_dim)
+src_node_transformation_weight:
+    Parameter (input_src_dim, n_heads * hidden_dim)
+src_nodes_attention_weight:
+    Parameter (n_heads, 2 * hidden_dim)
Returns
-------
torch.Tensor, shape (N, H, D_out)` where H is the number of heads, and D_out is size of output feature.
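The reworked parameter list above is all about tensor shapes. As a quick illustration (plain PyTorch only, not the actual HGConv code, with made-up sizes), this is how the documented shapes compose in a GAT-style projection and per-edge attention score:

import torch
import torch.nn as nn

n_src, n_dst = 6, 3                      # N_in source nodes, N_out destination nodes
input_src_dim, input_dst_dim = 16, 12    # hypothetical feature sizes
n_heads, hidden_dim = 4, 8

feat_src = torch.randn(n_src, input_src_dim)                       # (N_in, D_in_src)
feat_dst = torch.randn(n_dst, input_dst_dim)                       # (N_out, D_in_dst)
src_w = nn.Parameter(torch.randn(input_src_dim, n_heads * hidden_dim))
dst_w = nn.Parameter(torch.randn(input_dst_dim, n_heads * hidden_dim))
attn_w = nn.Parameter(torch.randn(n_heads, 2 * hidden_dim))

# Project features and split into heads: (N, n_heads, hidden_dim)
z_src = (feat_src @ src_w).view(n_src, n_heads, hidden_dim)
z_dst = (feat_dst @ dst_w).view(n_dst, n_heads, hidden_dim)

# Attention logit for one (src, dst) pair: concatenate per head, weight, and sum
pair = torch.cat([z_src[0], z_dst[0]], dim=-1)   # (n_heads, 2 * hidden_dim)
logits = (pair * attn_w).sum(dim=-1)             # one logit per head

print(z_dst.shape)   # torch.Size([3, 4, 8]) -> (N, H, D_out) as in the Returns section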
3 changes: 0 additions & 3 deletions openhgnn/models/SLiCE.py
@@ -17,9 +17,6 @@ def get_norm_id(id_map, some_id):
id_map[some_id] = len(id_map)
return id_map[some_id]

-"""
-Obtain a subgraph from the big graph based on a given set of edges
-"""
def norm_graph(node_id_map, edge_id_map, edge_list):
norm_edge_list = []
for e in edge_list:
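The deleted comment described norm_graph ("obtain a subgraph from the big graph based on a set of edges"); the visible tail of get_norm_id is the ID-compaction idiom it builds on. A tiny self-contained sketch of that idiom follows, with the if guard assumed, since only the last two lines of the helper appear in the hunk:

# ID compaction: each previously unseen raw ID gets the next dense index.
def get_norm_id(id_map, some_id):
    if some_id not in id_map:            # assumed guard; not shown in the hunk above
        id_map[some_id] = len(id_map)
    return id_map[some_id]

node_id_map = {}
for raw_id in ['p42', 'a7', 'p42', 'a9']:    # hypothetical raw node IDs
    print(raw_id, '->', get_norm_id(node_id_map, raw_id))
# p42 -> 0, a7 -> 1, p42 -> 0, a9 -> 2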
