-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathbackpropagation.py
51 lines (42 loc) · 1.3 KB
/
backpropagation.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
# https://en.wikipedia.org/wiki/Backpropagation
import numpy as np
np.random.seed(42)
# Logistic sigmoid: https://en.wikipedia.org/wiki/Sigmoid_function
def sigmoid(x):
    """Map x elementwise into (0, 1) via 1 / (1 + e^(-x))."""
    exp_neg = np.exp(-x)
    return 1.0 / (1 + exp_neg)
# sanity check: the sigmoid is exactly 0.5 at the origin
assert(sigmoid(0) == 0.5)
# Derivative of the sigmoid, expressed in terms of the sigmoid's OUTPUT:
# if s = sigmoid(z), then d(sigmoid)/dz = s * (1 - s).  Callers therefore
# pass an already-sigmoided value, never the raw pre-activation.
def sigmoid_dx(x):
    """Return x * (1 - x), the sigmoid slope given x = sigmoid(z)."""
    complement = 1.0 - x
    return x * complement
# sanity check: slope is maximal (0.25) where the sigmoid equals 0.5
assert(sigmoid_dx(0.5) == 0.25)
# Training set: targets follow XOR of the first two columns; the third
# column is a constant 1 and acts as a learned bias input.
x = np.array([
    [0, 0, 1],
    [0, 1, 1],
    [1, 0, 1],
    [1, 1, 1],
])
y = np.array([
    [0],
    [1],
    [1],
    [0],
])
# Randomly initialised weights: 3 inputs -> 4 hidden units -> 1 output.
# (calls kept in this exact order so the seeded RNG stream is unchanged)
w_1 = np.random.rand(x.shape[1],4)
w_2 = np.random.rand(4,1)
# Network predictions; overwritten by each forward pass.
output = np.zeros(y.shape)
def feedforward(x, w_1, w_2, output):
    """Run one forward pass through the 2-layer network.

    Returns (hidden activations, w_1, w_2, predictions).  The weights are
    passed straight through unchanged, and the incoming `output` argument
    is ignored — it is recomputed here before it is ever read.
    """
    hidden = sigmoid(x @ w_1)
    prediction = sigmoid(hidden @ w_2)
    return hidden, w_1, w_2, prediction
def backpropagation(layer_1, w_1, w_2, x, y, output):
    """One backward pass: update w_1 and w_2 in place via gradient ascent
    on -(y - output)^2 with an implicit learning rate of 1.

    Returns every argument back to the caller so the loop can rebind them.
    """
    # error signal at the output layer: d(loss)/d(pre-activation)
    delta_out = 2 * (y - output) * sigmoid_dx(output)
    w_2_step = layer_1.T @ delta_out
    # propagate the error back through w_2 to the hidden layer
    delta_hidden = (delta_out @ w_2.T) * sigmoid_dx(layer_1)
    w_1_step = x.T @ delta_hidden
    # apply the updates in place (callers hold the same arrays)
    w_1 += w_1_step
    w_2 += w_2_step
    return layer_1, w_1, w_2, x, y, output
# Train: alternate forward and backward passes for 1500 iterations.
for _ in range(1500):
    layer_1, w_1, w_2, output = feedforward(x, w_1, w_2, output)
    layer_1, w_1, w_2, x, y, output = backpropagation(layer_1, w_1, w_2, x, y, output)
# Final predictions — should approximate the XOR targets [0, 1, 1, 0]:
print(output)
# [[0.01043733]
#  [0.97230567]
#  [0.97090151]
#  [0.03501066]]