Commit 5ecc7a1

Fixing conflicts
2 parents: 07745a0 + 9d87420

1 file changed: 5 additions, 10 deletions


code/network_basic.py

Lines changed: 5 additions & 10 deletions
@@ -93,11 +93,11 @@ def backprop(self, training_data, n, eta):
         nabla_b[-1] += delta
         nabla_w[-1] += np.dot(delta, activations[-2].transpose())
         # Note that the variable l in the loop below is used a
-        # little differently to the book. Here, l = 1 means the
-        # last layer of neurons, l = 2 is the second-last layer,
-        # and so on. It's a renumbering of the scheme used in the
-        # book, used to take advantage of the fact that Python can
-        # use negative indices in lists.
+        # little differently to the notation in Chapter 2 of the book.
+        # Here, l = 1 means the last layer of neurons, l = 2 is the
+        # second-last layer, and so on. It's a renumbering of the
+        # scheme used in the book, used here to take advantage of the
+        # fact that Python can use negative indices in lists.
         for l in xrange(2, self.num_layers):
             z = zs[-l]
             spv = sigmoid_prime_vec(z)
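The renumbering the revised comment describes rests on Python's negative list indices: zs[-1] is the last layer's data, zs[-2] the second-last, and so on, so the loop walks backwards from the output with no explicit index arithmetic. A minimal standalone sketch of the idea (the layers list below is illustrative only, not a structure from network_basic.py):

# Each entry stands in for per-layer data such as zs or activations.
# Illustrative only; not a structure from network_basic.py.
layers = ["input", "hidden-1", "hidden-2", "output"]
num_layers = len(layers)

# l = 1 is the last layer, l = 2 the second-last, and so on. backprop
# starts its own loop at l = 2 because the output layer (l = 1) is
# handled just before the loop, via nabla_b[-1] and nabla_w[-1].
for l in range(1, num_layers):
    print("l = %d -> %s" % (l, layers[-l]))
# l = 1 -> output
# l = 2 -> hidden-2
# l = 3 -> hidden-1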
@@ -116,11 +116,6 @@ def evaluate(self, test_data):
                         for (x, y) in test_data]
         return sum(int(x == y) for (x, y) in test_results)
 
-    def cost(self, x, y):
-        """Return the quadratic cost associated to the network, with
-        input ``x`` and desired output ``y``."""
-        return np.sum((self.feedforward(x)-y)**2)/2.0
-
     def cost_derivative(self, output_activations, y):
         """Return the vector of partial derivatives \partial C_x /
         \partial a for the output activations, ``a``."""
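For context on what was dropped: the removed cost method computed the quadratic cost C_x = sum((a - y)**2)/2 for a single input, while the surviving cost_derivative returns the gradient of that cost with respect to the output activations, which for the quadratic cost is simply a - y. A standalone sketch of the pair, with a finite-difference check that the derivative matches the cost (the function names below are illustrative, not from network_basic.py):

import numpy as np

def quadratic_cost(a, y):
    # The cost the removed method computed: C_x = ||a - y||^2 / 2.
    return np.sum((a - y)**2) / 2.0

def quadratic_cost_derivative(a, y):
    # \partial C_x / \partial a, as in cost_derivative's docstring;
    # for the quadratic cost this is just a - y.
    return a - y

# Central-difference check that the derivative matches the cost.
a = np.array([0.2, 0.7, 0.1])
y = np.array([0.0, 1.0, 0.0])
eps = 1e-6
numeric = np.array([(quadratic_cost(a + eps * e, y) -
                     quadratic_cost(a - eps * e, y)) / (2 * eps)
                    for e in np.eye(len(a))])
assert np.allclose(numeric, quadratic_cost_derivative(a, y))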
