test.py
import matplotlib.pyplot as plt
import numpy as np
from MNIST_Dataloader import MNIST_Dataloader
#testing fork
class NeuralNetwork:
    def __init__(self, input_size=28*28, output_size=10, h_layers=1, h_neurons_per_layer=128):
        self.input_size = input_size
        self.output_size = output_size
        self.h_layers = h_layers
        self.h_neurons_per_layer = h_neurons_per_layer
        self.layers = self.init_layers(input_size, h_neurons_per_layer, output_size)

    # TODO: support a configurable number of hidden layers during initialization
    def init_layers(self, input_size, h_neurons_per_layer, output_size):
        '''
        Build the weight matrices connecting the layers.

        Each layer is a weight matrix with one row per input node and one
        column per node of the next layer, filled with uniform random values
        in [-1, 1) and scaled down by the square root of the matrix size.
        Returns the list of weight matrices.
        '''
        layer1 = np.random.uniform(-1., 1., size=(input_size, h_neurons_per_layer)) \
            / np.sqrt(input_size * h_neurons_per_layer)
        layer2 = np.random.uniform(-1., 1., size=(h_neurons_per_layer, output_size)) \
            / np.sqrt(h_neurons_per_layer * output_size)
        return [layer1, layer2]
    def desired_array_out(self, label):
        '''Turn a label into the desired one-hot output array.

        input:  label 5
        return: desired array [0 0 0 0 0 1 0 0 0 0]
        '''
        desired_array = np.zeros(self.output_size, np.float32)
        desired_array[label] = 1
        return desired_array
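
# Illustrative check of the default layer shapes produced by init_layers:
# >>> nn = NeuralNetwork()
# >>> [layer.shape for layer in nn.layers]
# [(784, 128), (128, 10)]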
# Sigmoid function
def sigmoid(x):
    return 1 / (np.exp(-x) + 1)

# Derivative of sigmoid
def d_sigmoid(x):
    return (np.exp(-x)) / ((np.exp(-x) + 1) ** 2)

# Softmax (inputs shifted by their max for numerical stability)
def softmax(x):
    exp_element = np.exp(x - x.max())
    return exp_element / np.sum(exp_element, axis=0)

# Elementwise derivative of softmax
def d_softmax(x):
    exp_element = np.exp(x - x.max())
    return exp_element / np.sum(exp_element, axis=0) * (1 - exp_element / np.sum(exp_element, axis=0))
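
# Sanity note: d_sigmoid(x) is algebraically sigmoid(x) * (1 - sigmoid(x)), e.g.
# >>> z = np.linspace(-5., 5., 11)
# >>> np.allclose(d_sigmoid(z), sigmoid(z) * (1 - sigmoid(z)))
# True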
# Forward and backward pass over one batch.
# x: batch of flattened images, shape (batch, 784)
# y: integer labels, shape (batch,)
# l1, l2: the two weight matrices (e.g. NeuralNetwork().layers)
def forward_backward_pass(x, y, l1, l2):
    # one-hot encode the labels
    targets = np.zeros((len(y), 10), np.float32)
    targets[range(targets.shape[0]), y] = 1

    # forward pass
    x_l1 = x.dot(l1)
    x_sigmoid = sigmoid(x_l1)
    x_l2 = x_sigmoid.dot(l2)
    out = softmax(x_l2)

    # backpropagation through l2
    error = 2 * (out - targets) / out.shape[0] * d_softmax(x_l2)
    update_l2 = x_sigmoid.T @ error

    # backpropagation through l1
    error = (l2.dot(error.T)).T * d_sigmoid(x_l1)
    update_l1 = x.T @ error

    return out, update_l1, update_l2
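
# Minimal usage sketch (illustrative helper, not wired into main()): it feeds a
# random batch, in place of real MNIST data, through forward_backward_pass above
# and applies one plain gradient-descent step with an assumed learning rate.
def _example_step():
    nn = NeuralNetwork()
    l1, l2 = nn.layers
    x = np.random.uniform(-1., 1., size=(32, 28 * 28))   # fake batch of 32 flattened images
    y = np.random.randint(0, 10, size=32)                 # fake labels
    out, update_l1, update_l2 = forward_backward_pass(x, y, l1, l2)
    l1 -= 0.01 * update_l1   # illustrative learning rate of 0.01
    l2 -= 0.01 * update_l2
    print(out.shape, update_l1.shape, update_l2.shape)    # (32, 10) (784, 128) (128, 10)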
def main():
    # dataloader = MNIST_Dataloader()
    # dataloader.show_images(5, 5)
    # dataloader.simple_show()
    nn = NeuralNetwork()
    print(nn.desired_array_out(3))
    print(nn.desired_array_out(9))


if __name__ == "__main__":
    main()