-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathmltemplate.py
73 lines (63 loc) · 2.21 KB
/
mltemplate.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
from dense import Dense
from tanh import Tanh
from losses import mse, mse_prime
import numpy as np
def test_outputs(inputs, network):
for i in inputs:
inp=np.reshape(i, (len(i), 1))
for layer in network:
inp = layer.forward(inp)
print(f"input: {"".join(str (j) for j in i[:2])} + {"".join(str(j) for j in i[2:])}\noutput: {inp.flatten()}")
# Training data: 2-bit + 2-bit binary addition.
# Each input is a 4-bit pattern [a1, a0, b1, b0]; the matching output is
# the 3-bit sum (a1a0 + b1b0). Generated for all 16 patterns instead of
# hand-typing the truth table.
inputs = [[(n >> 3) & 1, (n >> 2) & 1, (n >> 1) & 1, n & 1] for n in range(16)]
outputs = []
for n in range(16):
    total = ((n >> 2) & 3) + (n & 3)  # high 2 bits + low 2 bits
    outputs.append([(total >> 2) & 1, (total >> 1) & 1, total & 1])
# X holds the possible inputs as (samples, features, 1) column vectors,
# the shape the Dense layers consume. Y holds the DESIRED outputs as
# (samples, targets, 1); X[i] pairs with Y[i].
X = np.asarray(inputs)[:, :, np.newaxis]
Y = np.asarray(outputs)[:, :, np.newaxis]
# Network architecture: alternating Dense -> Tanh pairs.
# Each Dense layer's fan-in must equal the previous layer's fan-out;
# the first fan-in is the input width and the last fan-out is the
# target width. Built from a size table so the chaining is explicit.
_layer_sizes = [
    (len(inputs[0]), 5),
    (5, 4),
    (4, len(outputs[0])),
]
network = []
for fan_in, fan_out in _layer_sizes:
    network.append(Dense(fan_in, fan_out))
    network.append(Tanh())
epochs = 10000
learning_rate = 0.1
# Plain SGD: one full forward/backward pass per sample, per epoch.
for epoch in range(epochs):
    epoch_error = 0
    for sample, target in zip(X, Y):
        # Forward pass: feed the sample through every layer in order.
        activation = sample
        for layer in network:
            activation = layer.forward(activation)
        epoch_error += mse(target, activation)
        # Backward pass: propagate the loss gradient in reverse layer order,
        # letting each layer update its own parameters.
        gradient = mse_prime(target, activation)
        for layer in reversed(network):
            gradient = layer.backward(gradient, learning_rate)
    epoch_error /= len(X)
    print('%d/%d, error=%f' % (epoch + 1, epochs, epoch_error))
# NOTE: Evaluate the trained model on every row of the training data:
# test_outputs reshapes each flat input list to a (4, 1) column vector,
# runs it through `network`, and prints the input bits beside the
# network's raw (tanh) output values.
test_outputs(inputs,network)