Commit f5fbe52

Add sigmoid derivative function
1 parent 93c22bb commit f5fbe52

4 files changed: +30 −9 lines changed

learning.py

Lines changed: 14 additions & 9 deletions
@@ -3,7 +3,8 @@
 from utils import (
     removeall, unique, product, mode, argmax, argmax_random_tie, isclose,
     dotproduct, vector_add, scalar_vector_product, weighted_sample_with_replacement,
-    weighted_sampler, num_or_str, normalize, clip, sigmoid, print_table, DataFile
+    weighted_sampler, num_or_str, normalize, clip, sigmoid, print_table,
+    DataFile, sigmoid_derivative
 )
 
 import copy
@@ -541,6 +542,10 @@ def random_weights(min_value, max_value, num_weights):
     return [random.uniform(min_value, max_value) for i in range(num_weights)]
 
 
+def sigmoid_derivative_value(node):
+    return node.value * (1 - node.value)
+
+
 def BackPropagationLearner(dataset, net, learning_rate, epochs):
     """[Figure 18.23] The back-propagation algorithm for multilayer network"""
     # Initialise weights
@@ -558,7 +563,9 @@ def BackPropagationLearner(dataset, net, learning_rate, epochs):
     idx_t = [dataset.target]
     idx_i = dataset.inputs
     n_layers = len(net)
+    # output nodes
     o_nodes = net[-1]
+    # input nodes
     i_nodes = net[0]
 
     for epoch in range(epochs):
@@ -582,10 +589,10 @@ def BackPropagationLearner(dataset, net, learning_rate, epochs):
 
             # Compute outer layer delta
             o_units = len(o_nodes)
-            err = [t_val[i] - o_nodes[i].value
-                   for i in range(o_units)]
-            delta[-1] = [(o_nodes[i].value) * (1 - o_nodes[i].value) *
-                         (err[i]) for i in range(o_units)]
+            # Error for the MSE cost function
+            err = [t_val[i] - o_nodes[i].value for i in range(o_units)]
+            # The activation function used is the sigmoid function
+            delta[-1] = [sigmoid_derivative(o_nodes[i].value) * err[i] for i in range(o_units)]
 
             # Backward pass
             h_layers = n_layers - 2
@@ -594,11 +601,9 @@ def BackPropagationLearner(dataset, net, learning_rate, epochs):
                 h_units = len(layer)
                 nx_layer = net[i+1]
                 # weights from each ith layer node to each i + 1th layer node
-                w = [[node.weights[k] for node in nx_layer]
-                     for k in range(h_units)]
+                w = [[node.weights[k] for node in nx_layer] for k in range(h_units)]
 
-                delta[i] = [(layer[j].value) * (1 - layer[j].value) *
-                            dotproduct(w[j], delta[i+1])
+                delta[i] = [sigmoid_derivative(layer[j].value) * dotproduct(w[j], delta[i+1])
                             for j in range(h_units)]
 
                 # Update weights
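Both backpropagation hunks replace the inlined expression value * (1 - value) with the new sigmoid_derivative helper. Below is a minimal standalone sketch of the outer-layer delta step; the target list t_val mirrors the variable in the diff, while o_val and the input values are made-up stand-ins for the repository's node objects, not code from the commit.

import math

def sigmoid(x):
    return 1 / (1 + math.exp(-x))

def sigmoid_derivative(value):
    # 'value' is assumed to already be a sigmoid *output*, so
    # sigma'(x) = sigma(x) * (1 - sigma(x)) reduces to value * (1 - value)
    return value * (1 - value)

# Illustrative targets and output-node activations (hypothetical values)
t_val = [1.0, 0.0]
o_val = [sigmoid(0.4), sigmoid(-0.3)]
o_units = len(o_val)

# Error for the MSE cost function, as in the commit
err = [t_val[i] - o_val[i] for i in range(o_units)]

# Outer-layer delta: sigmoid derivative at each output times its error
delta_out = [sigmoid_derivative(o_val[i]) * err[i] for i in range(o_units)]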

tests/test_learning.py

Lines changed: 2 additions & 0 deletions
@@ -99,3 +99,5 @@ def test_random_weights():
 
     for weight in test_weights:
         assert weight >= min_value and weight <= max_value
+
+

tests/test_utils.py

Lines changed: 10 additions & 0 deletions
@@ -136,6 +136,16 @@ def test_sigmoid():
     assert isclose(0.2689414213699951, sigmoid(-1))
 
 
+def test_sigmoid_derivative():
+    value = 1
+
+    assert sigmoid_derivative(value) == 0
+
+    value = 3
+
+    assert sigmoid_derivative(value) == -6
+
+
 def test_step():
     assert step(1) == step(0.5) == 1
     assert step(0) == 1
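The expected values follow directly from the helper's formula value * (1 - value): 1 * (1 - 1) = 0 and 3 * (1 - 3) = -6. These arguments only exercise the arithmetic; an actual sigmoid output always lies in (0, 1), where the derivative peaks at 0.25.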

utils.py

Lines changed: 4 additions & 0 deletions
@@ -249,6 +249,10 @@ def clip(x, lowest, highest):
     return max(lowest, min(x, highest))
 
 
+def sigmoid_derivative(value):
+    return value * (1 - value)
+
+
 def sigmoid(x):
     """Return activation value of x with sigmoid function"""
     return 1/(1 + math.exp(-x))
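A quick sanity check, not part of the commit: since sigma'(x) = sigma(x) * (1 - sigma(x)), applying sigmoid_derivative to the *output* of sigmoid should agree with a numerical estimate of the slope. The snippet redefines both helpers so it runs standalone; x = 0.7 is an arbitrary test point.

import math

def sigmoid(x):
    return 1 / (1 + math.exp(-x))

def sigmoid_derivative(value):
    return value * (1 - value)

x = 0.7  # arbitrary test point
analytic = sigmoid_derivative(sigmoid(x))                 # sigma(x) * (1 - sigma(x))
numeric = (sigmoid(x + 1e-6) - sigmoid(x - 1e-6)) / 2e-6  # central difference
assert math.isclose(analytic, numeric, rel_tol=1e-6)

Taking the already-activated value as the argument is a common convention: the forward pass has computed sigma(x) anyway, so the backward pass needs no extra call to math.exp.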
