LDNN.c
#include"LDNN.h"
#define wijk network->weight[i][j][k]
#define wij network->weight[i][j]
#define bij network->bias[i][j]
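/*
 * Model implemented below: the output is a soft disjunction of N polytopes,
 * each polytope a soft conjunction of M half-spaces,
 *
 *   classify(x) = 1 - prod_i ( 1 - prod_j sigmoid(w_ij . x - b_ij) ),
 *
 * so weight[i][j] and bias[i][j] describe the j-th half-space of the
 * i-th polytope.
 */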
network_t *make_network() {
    int N = settings.N;
    int M = settings.M;
    int DIM = settings.DIM;
    // total allocation: network struct + N*M*DIM weights + N*M biases
    network_t *network = (network_t*) malloc(sizeof(network_t));
    network->N = N;
    network->M = M;
    // all weight vectors live in one contiguous block; weight[i][j] points into it
    vector_t tmp = malloc(N*M*DIM*sizeof(PRECISION));
    network->weight = malloc(N*sizeof(vector_t *));
    network->bias   = malloc(N*sizeof(vector_t));
    for(int i=0; i<N; i++) {
        network->weight[i] = malloc(M*sizeof(vector_t));
        network->bias[i]   = malloc(M*sizeof(PRECISION));
        for(int j=0; j<M; j++) {
            network->weight[i][j] = tmp;
            tmp += DIM;
        }
    }
    return network;
}
void destroy_network(network_t *network) {
    free(network->weight[0][0]);   // the contiguous weight block allocated in make_network
    for(int i=0; i<network->N; i++) { free(network->weight[i]); free(network->bias[i]); }
    free(network->weight); free(network->bias); free(network);
}
void vector_centroid(vector_t centroid, vector_t *data, int start, int amount) {
    // mean of data[start .. start+amount-1]
    for(int i=0; i<settings.DIM; i++)
        centroid[i] = 0;
    for(int i=start; i<start+amount; i++)
        vector_add(centroid, data[i]);
    vector_scale(centroid, 1/(PRECISION)amount);
}
void init_network(network_t *network, int neg_size, vector_t *neg, int pos_size, vector_t *pos) {
    int N = network->N;
    int M = network->M;
    vector_t pos_centroid = malloc(settings.DIM*sizeof(PRECISION));
    vector_t neg_centroid = malloc(settings.DIM*sizeof(PRECISION));
    for(int i=0; i<N; i++) {
        for(int j=0; j<M; j++) {
            // centroids of one slice of the positive and one slice of the negative samples
            vector_centroid(pos_centroid, pos, i*(pos_size/N), pos_size/N);
            vector_centroid(neg_centroid, neg, j*(neg_size/M), neg_size/M);
            // weight: unit vector pointing from the negative centroid towards the positive one
            vector_copy(pos_centroid, wij);
            vector_sub(wij, neg_centroid);
            vector_normalize(wij);
            // bias: projection of the midpoint between the two centroids onto that direction
            vector_add(pos_centroid, neg_centroid);
            vector_scale(pos_centroid, 0.5);
            bij = vector_scalar_prod(wij, pos_centroid);
        }
    }
    free(pos_centroid);
    free(neg_centroid);
}
PRECISION halfspace(network_t *network, int i, int j, vector_t v) {
    // sigmoid(w_ij . v - b_ij): soft membership in the j-th half-space of polytope i
    PRECISION sum = 0;
    for(int k=0; k<settings.DIM; k++) {
        sum += wijk * v[k];
    }
    return 1/(1+exp(-sum+bij));
}
PRECISION polytope(network_t *network, int i, vector_t testvec) {
    // soft AND over the M half-spaces of polytope i
    PRECISION prod = 1;
    for(int j=0; j<network->M; j++) {
        prod *= halfspace(network, i, j, testvec);
    }
    return prod;
}
PRECISION classify(network_t *network, vector_t testvec) {
    // soft OR over the N polytopes: 1 - prod_i (1 - polytope_i)
    PRECISION prod = 1;
    for(int i=0; i<network->N; i++) {
        prod *= 1-polytope(network, i, testvec);
    }
    return 1-prod;
}
void gradient_train(network_t *network, int class, vector_t x) {
    // one stochastic gradient step on the squared error (classify(x) - class)^2
    vector_t dx = malloc(settings.DIM*sizeof(PRECISION));   // scratch copy so x itself is never modified
    for(int i=0; i<settings.N; i++) {
        for(int j=0; j<settings.M; j++) {
            PRECISION diff_bias = 2*(classify(network, x) - class);
            for(int r=0; r<settings.N; r++) {
                if(i==r)
                    continue;
                diff_bias *= (1-polytope(network, r, x));
            }
            diff_bias *= (1-polytope(network, i, x)) * (1-halfspace(network, i, j, x));
            diff_bias *= settings.alpha;
            // apply diff: the weight moves against diff_bias * x, the bias against diff_bias
            vector_copy(x, dx);
            vector_scale(dx, diff_bias);
            vector_sub(wij, dx);
            bij -= diff_bias;
        }
    }
    free(dx);
}
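/*
 * Minimal usage sketch (not part of the original source): a tiny 2-D toy
 * problem with two polytopes of two half-spaces each. It assumes vector_t is
 * a pointer to PRECISION (as make_network suggests) and that settings is a
 * writable global with the fields N, M, DIM, alpha already used above; the
 * sample data and the LDNN_EXAMPLE_MAIN guard are purely illustrative.
 */
#ifdef LDNN_EXAMPLE_MAIN
#include <stdio.h>
int main(void) {
    settings.N = 2; settings.M = 2; settings.DIM = 2; settings.alpha = 0.1;

    // hypothetical training data: positives near (1,1), negatives near (-1,-1)
    PRECISION p0[2] = { 1.0,  1.0}, p1[2] = { 1.2,  0.8};
    PRECISION n0[2] = {-1.0, -1.0}, n1[2] = {-0.8, -1.2};
    vector_t pos[2] = {p0, p1};
    vector_t neg[2] = {n0, n1};

    network_t *net = make_network();
    init_network(net, 2, neg, 2, pos);

    // a few alternating gradient passes over the two classes
    for(int epoch=0; epoch<200; epoch++) {
        gradient_train(net, 1, pos[epoch%2]);
        gradient_train(net, 0, neg[epoch%2]);
    }

    printf("p(pos[0]) = %f, p(neg[0]) = %f\n",
           (double)classify(net, pos[0]), (double)classify(net, neg[0]));
    destroy_network(net);
    return 0;
}
#endif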