transductive_classifier.py
import argparse

from utils.classifier import Classifier
from trainer import Trainer
from utils import process


def parse_args():
    '''
    Parses the command-line arguments.
    '''
    parser = argparse.ArgumentParser(description="Run GATE.")
    parser.add_argument('--dataset', nargs='?', default='cora',
                        help='Input dataset.')
    parser.add_argument('--lr', type=float, default=0.0001,
                        help='Learning rate. Default is 0.0001.')
    parser.add_argument('--n-epochs', default=200, type=int,
                        help='Number of training epochs.')
    parser.add_argument('--hidden-dims', type=int, nargs='+', default=[512, 512],
                        help='Dimensions of the hidden layers.')
    parser.add_argument('--lambda-', default=1, type=float,
                        help='Parameter controlling the contribution of edge reconstruction in the loss function.')
    parser.add_argument('--dropout', default=0.0, type=float,
                        help='Dropout rate.')
    parser.add_argument('--gradient_clipping', default=5.0, type=float,
                        help='Gradient clipping threshold.')
    return parser.parse_args()


def main(args):
    '''
    Pipeline for the Graph Attention Autoencoder.
    '''
    G, X, Y, idx_train, idx_val, idx_test = process.load_data(args.dataset)

    # Add the input feature dimension to the beginning of hidden_dims.
    feature_dim = X.shape[1]
    args.hidden_dims = [feature_dim] + args.hidden_dims

    # Prepare the graph data.
    G_tf, S, R = process.prepare_graph_data(G)

    # Train the model and infer node embeddings.
    trainer = Trainer(args)
    trainer(G_tf, X, S, R)
    embeddings, attentions = trainer.infer(G_tf, X, S, R)

    # Evaluate the quality of the embeddings with a node classifier.
    classifier = Classifier(vectors=embeddings)
    f1s = classifier(idx_train, idx_test, idx_val, Y, seed=0)
    print(f1s)


if __name__ == "__main__":
    args = parse_args()
    main(args)
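
# Usage sketch (an illustration based on the argument parser above, not part of
# the original file): assuming the repo layout is intact and the default 'cora'
# dataset is available to utils.process.load_data, the script could be invoked
# from the repository root as
#
#     python transductive_classifier.py --dataset cora --lr 0.0001 \
#         --n-epochs 200 --hidden-dims 512 512
#
# --hidden-dims accepts one or more integers; main() prepends the input feature
# dimension to this list before constructing the Trainer.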