model.py
import torch
from torch import nn
from torch.nn import functional as F

from layer import GraphConvolution
from config import args


class GCN(nn.Module):
    """Two-layer graph convolutional network followed by a linear classifier."""

    def __init__(self, input_dim, output_dim, num_features_nonzero):
        super(GCN, self).__init__()

        self.input_dim = input_dim    # e.g. 1433 features per node on Cora
        self.output_dim = output_dim

        print('input dim:', input_dim)
        print('output dim:', output_dim)
        print('num_features_nonzero:', num_features_nonzero)

        # Each GraphConvolution consumes and returns an (x, support) tuple,
        # which lets the two graph layers be chained with nn.Sequential.
        self.layers = nn.Sequential(
            GraphConvolution(self.input_dim, args.hidden, num_features_nonzero,
                             activation=F.relu,
                             dropout=0.5,
                             is_sparse_inputs=True),
            GraphConvolution(args.hidden, 32, num_features_nonzero,
                             activation=F.relu,
                             dropout=0.5,
                             is_sparse_inputs=False),
        )
        self.out = nn.Linear(32, output_dim)
        self.dropout = nn.Dropout(0.5)  # defined here but not applied in forward()

    def forward(self, inputs):
        x, support = inputs
        # The stacked graph layers return a (features, support) tuple; keep the features.
        x = self.layers((x, support))
        x = F.relu(x[0])
        x = self.out(x)
        return x

    def l2_loss(self):
        # L2 regularization is applied only to the first graph convolution layer,
        # mirroring the original GCN reference implementation, which only
        # regularizes the first layer's weights.
        layer = next(iter(self.layers.children()))

        loss = None
        for p in layer.parameters():
            if loss is None:
                loss = p.pow(2).sum()
            else:
                loss += p.pow(2).sum()

        return loss
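
For reference, a minimal usage sketch (not part of the original file). It assumes that GraphConvolution in layer.py accepts a (features, support) tuple of torch sparse tensors and uses num_features_nonzero for sparse dropout, and that config.args defines hidden; the toy tensors and dimensions below are purely illustrative.

import torch
from model import GCN

num_nodes, feat_dim, num_classes = 4, 10, 3

# Toy inputs: a sparse node-feature matrix and a (here trivially diagonal)
# normalized adjacency, i.e. the "support" matrix.
features = torch.eye(num_nodes, feat_dim).to_sparse()
support = torch.eye(num_nodes).to_sparse()

model = GCN(input_dim=feat_dim, output_dim=num_classes,
            num_features_nonzero=features._nnz())
model.eval()  # skip dropout for this smoke test

logits = model((features, support))  # expected shape: [num_nodes, num_classes]
reg = model.l2_loss()                # L2 penalty over the first layer's parameters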