@@ -85,8 +85,8 @@ def __init__(self,
         self.out_feats = out_feats
         self.edge_embedding = tlx.nn.Embedding(num_etypes, edge_feats)

-        self.fc_node = tlx.nn.Linear(out_feats * heads, in_features=in_feats, b_init=None, W_init=tlx.initializers.XavierNormal(gain=1.414), name='fc_node')
-        self.fc_edge = tlx.nn.Linear(edge_feats * heads, in_features=edge_feats, b_init=None, W_init=tlx.initializers.XavierNormal(gain=1.414), name='fc_edge')
+        self.fc_node = tlx.nn.Linear(out_feats * heads, in_features=in_feats, b_init=None, W_init=tlx.initializers.XavierNormal(gain=1.414))
+        self.fc_edge = tlx.nn.Linear(edge_feats * heads, in_features=edge_feats, b_init=None, W_init=tlx.initializers.XavierNormal(gain=1.414))

         self.attn_src = self._get_weights('attn_l', shape=(1, heads, out_feats), init=tlx.initializers.XavierNormal(gain=1.414), order=True)
         self.attn_dst = self._get_weights('attn_r', shape=(1, heads, out_feats), init=tlx.initializers.XavierNormal(gain=1.414), order=True)
@@ -96,7 +96,7 @@ def __init__(self,
         self.attn_drop = tlx.nn.Dropout(attn_drop)
         self.leaky_relu = tlx.nn.LeakyReLU(negative_slope)

-        self.fc_res = tlx.nn.Linear(heads * out_feats, in_features=in_feats, b_init=None, W_init=tlx.initializers.XavierNormal(gain=1.414), name='fc_res') if residual else None
+        self.fc_res = tlx.nn.Linear(heads * out_feats, in_features=in_feats, b_init=None, W_init=tlx.initializers.XavierNormal(gain=1.414)) if residual else None

         self.activation = activation

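A note on the change (the diff does not state its motivation, so this is an assumption): hardcoding `name='fc_node'` / `name='fc_edge'` / `name='fc_res'` gives every instance of the layer the same sub-layer identifiers, which can clash when the layer is instantiated more than once in a stacked model; omitting `name` lets TensorLayerX auto-generate a unique name per `Linear` instance. A minimal sketch, assuming a hypothetical stand-in class (`EdgeGATDemo` is not the repository's actual class):

```python
# Minimal sketch of the patched pattern (assumption: the layer is stacked,
# as a multi-layer model would do; EdgeGATDemo is a hypothetical stand-in).
import tensorlayerx as tlx


class EdgeGATDemo(tlx.nn.Module):
    def __init__(self, in_feats=16, out_feats=8, heads=4):
        super().__init__()
        # name= omitted, as in the patch: TensorLayerX assigns each Linear
        # an auto-generated name instead of the fixed 'fc_node'.
        self.fc_node = tlx.nn.Linear(
            out_feats * heads,
            in_features=in_feats,
            b_init=None,
            W_init=tlx.initializers.XavierNormal(gain=1.414),
        )

    def forward(self, x):
        return self.fc_node(x)


# Two instances no longer share the hardcoded 'fc_node' identifier, so
# their weights are registered under distinct auto-generated names.
layer1 = EdgeGATDemo()
layer2 = EdgeGATDemo()
```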