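"""Graph coarsening utilities: multilevel Graclus/METIS-style clustering of a
graph given by its adjacency matrix, plus the node permutations and fake-node
paddings needed to pool graph signals as on a balanced binary tree."""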
import numpy as np
import scipy.sparse
def coarsen(A, levels, self_connections=False):
    """
    Coarsen a graph, represented by its adjacency matrix A, at multiple
    levels.

    Returns the list of coarsened adjacency matrices (the input graph first,
    with its vertices permuted and padded with fake nodes) and the
    permutation to apply to the data matrices, see perm_data().
    """
graphs, parents = metis(A, levels)
perms = compute_perm(parents)
for i, A in enumerate(graphs):
M, M = A.shape
if not self_connections:
A = A.tocoo()
A.setdiag(0)
if i < levels:
A = perm_adjacency(A, perms[i])
A = A.tocsr()
A.eliminate_zeros()
graphs[i] = A
Mnew, Mnew = A.shape
        print('Layer {0}: M_{0} = |V| = {1} nodes ({2} added),'
              ' |E| = {3} edges'.format(i, Mnew, Mnew-M, A.nnz//2))
return graphs, perms[0] if levels > 0 else None
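
# Usage sketch (illustrative, not part of the original file): coarsen a small
# symmetric graph twice. Cluster assignments depend on a random vertex
# ordering, so only the list lengths are checked here.
#
#   A = scipy.sparse.csr_matrix(np.array(
#       [[0, 1, 1, 0],
#        [1, 0, 1, 0],
#        [1, 1, 0, 1],
#        [0, 0, 1, 0]], dtype=np.float32))
#   graphs, perm = coarsen(A, levels=2)
#   assert len(graphs) == 3          # permuted input graph + 2 coarser graphs
#   assert len(perm) >= A.shape[0]   # fake nodes may have been appended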

def metis(W, levels, rid=None):
    """
    Coarsen a graph multiple times using the METIS algorithm.

    INPUT
    W: symmetric sparse weight (adjacency) matrix
    levels: the number of coarsened graphs

    OUTPUT
    graphs[0]: original graph of size N_1
    graphs[1]: coarser graph of size N_2 < N_1
    graphs[levels]: coarsest graph of size N_{levels+1} < ... < N_2 < N_1
    parents[i] is a vector of size N_i with entries ranging from 0 to
    N_{i+1}-1 which indicate the parents in the coarser graphs[i+1]
    nd_sz{i} is a vector of size N_i that contains the size of the supernode
    in graphs[i] (kept in the comments below, currently unused)

    NOTE
    If "graphs" is a list of length k, then "parents" will be a list of
    length k-1.
    """
N, N = W.shape
if rid is None:
rid = np.random.permutation(range(N))
parents = []
degree = W.sum(axis=0) - W.diagonal()
graphs = []
graphs.append(W)
#supernode_size = np.ones(N)
#nd_sz = [supernode_size]
#count = 0
#while N > maxsize:
for _ in range(levels):
#count += 1
# CHOOSE THE WEIGHTS FOR THE PAIRING
# weights = ones(N,1) # metis weights
weights = degree # graclus weights
# weights = supernode_size # other possibility
weights = np.array(weights).squeeze()
# PAIR THE VERTICES AND CONSTRUCT THE ROOT VECTOR
idx_row, idx_col, val = scipy.sparse.find(W)
perm = np.argsort(idx_row)
rr = idx_row[perm]
cc = idx_col[perm]
vv = val[perm]
cluster_id = metis_one_level(rr,cc,vv,rid,weights) # rr is ordered
parents.append(cluster_id)
# TO DO
# COMPUTE THE SIZE OF THE SUPERNODES AND THEIR DEGREE
#supernode_size = full( sparse(cluster_id, ones(N,1) , supernode_size ) )
#print(cluster_id)
#print(supernode_size)
#nd_sz{count+1}=supernode_size;
# COMPUTE THE EDGES WEIGHTS FOR THE NEW GRAPH
nrr = cluster_id[rr]
ncc = cluster_id[cc]
nvv = vv
Nnew = cluster_id.max() + 1
# CSR is more appropriate: row,val pairs appear multiple times
W = scipy.sparse.csr_matrix((nvv,(nrr,ncc)), shape=(Nnew,Nnew))
W.eliminate_zeros()
# Add new graph to the list of all coarsened graphs
graphs.append(W)
N, N = W.shape
# COMPUTE THE DEGREE (OMIT OR NOT SELF LOOPS)
degree = W.sum(axis=0)
#degree = W.sum(axis=0) - W.diagonal()
        # CHOOSE THE ORDER IN WHICH VERTICES WILL BE VISITED AT THE NEXT PASS
#[~, rid]=sort(ss); # arthur strategy
#[~, rid]=sort(supernode_size); # thomas strategy
#rid=randperm(N); # metis/graclus strategy
ss = np.array(W.sum(axis=0)).squeeze()
rid = np.argsort(ss)
return graphs, parents
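
# Illustrative note (added): for `levels` coarsenings, metis() returns
# `levels + 1` graphs and `levels` parent vectors:
#
#   graphs, parents = metis(W, levels=2)
#   assert len(graphs) == 3 and len(parents) == 2
#   # parents[0][v] is the supernode of vertex v of graphs[0] in graphs[1].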

# Coarsen a graph given by rr,cc,vv. rr is assumed to be ordered.
def metis_one_level(rr,cc,vv,rid,weights):

    nnz = rr.shape[0]
    N = rr[nnz-1] + 1

    marked = np.zeros(N, bool)
    rowstart = np.zeros(N, np.int32)
    rowlength = np.zeros(N, np.int32)
    cluster_id = np.zeros(N, np.int32)

    oldval = rr[0]
    count = 0
    clustercount = 0

    # Compute the start and length of each row of the (rr-sorted) edge list.
    # The row boundary must be detected before counting the current entry,
    # otherwise the first entry of each row is credited to the previous row.
    for ii in range(nnz):
        if rr[ii] > oldval:
            oldval = rr[ii]
            rowstart[count+1] = ii
            count = count + 1
        rowlength[count] = rowlength[count] + 1
for ii in range(N):
tid = rid[ii]
if not marked[tid]:
wmax = 0.0
rs = rowstart[tid]
marked[tid] = True
bestneighbor = -1
for jj in range(rowlength[tid]):
nid = cc[rs+jj]
if marked[nid]:
tval = 0.0
else:
tval = vv[rs+jj] * (1.0/weights[tid] + 1.0/weights[nid])
if tval > wmax:
wmax = tval
bestneighbor = nid
cluster_id[tid] = clustercount
if bestneighbor > -1:
cluster_id[bestneighbor] = clustercount
marked[bestneighbor] = True
clustercount += 1
return cluster_id
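
# Illustrative check (added): on the path graph 0-1-2 with unit edge weights
# and degrees [1, 2, 1] as Graclus weights, vertices 0 and 1 are paired first
# (their local normalized cut 1/1 + 1/2 is the largest) and vertex 2, whose
# only neighbor is then already marked, stays a singleton.
assert list(metis_one_level(
    np.array([0, 1, 1, 2]), np.array([1, 0, 2, 1]),  # rr, cc: edges of 0-1-2
    np.array([1., 1., 1., 1.]),                      # vv: unit edge weights
    np.array([0, 1, 2]),                             # rid: visiting order
    np.array([1., 2., 1.]))) == [0, 0, 1]            # weights: vertex degrees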
def compute_perm(parents):
"""
Return a list of indices to reorder the adjacency and data matrices so
that the union of two neighbors from layer to layer forms a binary tree.
"""
# Order of last layer is random (chosen by the clustering algorithm).
indices = []
if len(parents) > 0:
M_last = max(parents[-1]) + 1
indices.append(list(range(M_last)))
for parent in parents[::-1]:
#print('parent: {}'.format(parent))
        # Fake nodes go after real ones.
        pool_singletons = len(parent)

        indices_layer = []
        for i in indices[-1]:
            indices_node = list(np.where(parent == i)[0])
            assert 0 <= len(indices_node) <= 2
            #print('indices_node: {}'.format(indices_node))

            # Add a node to go with a singleton.
            if len(indices_node) == 1:
                indices_node.append(pool_singletons)
                pool_singletons += 1
                #print('new singleton: {}'.format(indices_node))
            # Add two nodes as children of a singleton in the parent.
            elif len(indices_node) == 0:
                indices_node.append(pool_singletons+0)
                indices_node.append(pool_singletons+1)
                pool_singletons += 2
                #print('singleton children: {}'.format(indices_node))

            indices_layer.extend(indices_node)
        indices.append(indices_layer)
    # Sanity checks.
    for i,indices_layer in enumerate(indices):
        M = M_last*2**i

        # Reduction by 2 at each layer (binary tree).
        assert len(indices_layer) == M

        # The new ordering does not omit an index.
        assert sorted(indices_layer) == list(range(M))

    return indices[::-1]
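# Self-test from the module: a 9-node graph is clustered into 5 supernodes,
# then into 3. Singletons receive fake siblings (index 5 in the middle layer,
# indices 9-11 in the finest) so that every parent has exactly two children.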
assert (compute_perm([np.array([4,1,1,2,2,3,0,0,3]),np.array([2,1,0,1,0])])
== [[3,4,0,9,1,2,5,8,6,7,10,11],[2,4,1,3,0,5],[0,1,2]])
def perm_data(x, indices):
"""
Permute data matrix, i.e. exchange node ids,
so that binary unions form the clustering tree.
"""
if indices is None:
return x
N, M = x.shape
Mnew = len(indices)
assert Mnew >= M
xnew = np.empty((N, Mnew))
for i,j in enumerate(indices):
# Existing vertex, i.e. real data.
if j < M:
xnew[:,i] = x[:,j]
            # Fake vertex because of singletons.
            # They will stay 0 so that max pooling chooses the singleton.
            # Or -infty ?
else:
xnew[:,i] = np.zeros(N)
return xnew
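
# Illustrative checks (added): a pure permutation, then a permutation with a
# fake vertex (index 3 >= M) whose column is filled with zeros.
x = np.array([[10., 20., 30.]])
assert np.all(perm_data(x, [2, 0, 1]) == np.array([[30., 10., 20.]]))
assert np.all(perm_data(x, [1, 0, 3, 2]) == np.array([[20., 10., 0., 30.]]))
del x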
def perm_adjacency(A, indices):
"""
Permute adjacency matrix, i.e. exchange node ids,
so that binary unions form the clustering tree.
"""
if indices is None:
return A
M, M = A.shape
Mnew = len(indices)
assert Mnew >= M
A = A.tocoo()
# Add Mnew - M isolated vertices.
if Mnew > M:
rows = scipy.sparse.coo_matrix((Mnew-M, M), dtype=np.float32)
cols = scipy.sparse.coo_matrix((Mnew, Mnew-M), dtype=np.float32)
A = scipy.sparse.vstack([A, rows])
A = scipy.sparse.hstack([A, cols])
# Permute the rows and the columns.
perm = np.argsort(indices)
A.row = np.array(perm)[A.row]
A.col = np.array(perm)[A.col]
# assert np.abs(A - A.T).mean() < 1e-9
    assert type(A) is scipy.sparse.coo_matrix
return A
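
# Illustrative check (added): permuting with indices [1, 0] swaps the two
# vertices, so the diagonal entries trade places.
assert np.all(
    perm_adjacency(scipy.sparse.coo_matrix(np.array([[1., 0.], [0., 3.]])),
                   [1, 0]).toarray() == np.array([[3., 0.], [0., 1.]]))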