Commit b0748f0

Fixing inpt bug
1 parent f32881f commit b0748f0


src/network3.py

Lines changed: 8 additions & 10 deletions
@@ -111,10 +111,10 @@ def __init__(self, layers, mini_batch_size):
         self.x = T.matrix("x")
         self.y = T.ivector("y")
         init_layer = self.layers[0]
-        init_layer.set_inpt(self.x, mini_batch_size)
+        init_layer.set_inpt(self.x, self.mini_batch_size)
         for j in xrange(1, len(self.layers)):
             prev_layer, layer = self.layers[j-1], self.layers[j]
-            layer.set_inpt(prev_layer.output, mini_batch_size)
+            layer.set_inpt(prev_layer.output, self.mini_batch_size)
         self.output = self.layers[-1].output

     def SGD(self, training_data, epochs, mini_batch_size, eta,
@@ -261,8 +261,7 @@ def __init__(self, n_in, n_out, activation_fn=sigmoid):
         # Initialize weights and biases
         self.w = theano.shared(
             np.asarray(
-                np.random.normal(
-                    loc=0.0, scale=np.sqrt(1.0/n_out), size=(n_in, n_out)),
+                np.random.normal(loc=0.0, scale=np.sqrt(1.0/n_out), size=(n_in, n_out)),
                 dtype=theano.config.floatX),
             name='w', borrow=True)
         self.b = theano.shared(
@@ -272,9 +271,8 @@ def __init__(self, n_in, n_out, activation_fn=sigmoid):
         self.params = [self.w, self.b]

     def set_inpt(self, inpt, mini_batch_size):
-        self.mini_batch_size = mini_batch_size
-        self.inpt = inpt.reshape((self.mini_batch_size, self.n_in))
-        self.output = self.activation_fn(T.dot(inpt, self.w)+self.b)
+        self.inpt = inpt.reshape((mini_batch_size, self.n_in))
+        self.output = self.activation_fn(T.dot(self.inpt, self.w) + self.b)

 class SoftmaxLayer():

@@ -293,9 +291,8 @@ def __init__(self, n_in, n_out):
         self.params = [self.w, self.b]

     def set_inpt(self, inpt, mini_batch_size):
-        self.mini_batch_size = mini_batch_size
-        self.inpt = inpt.reshape((self.mini_batch_size, self.n_in))
-        self.output = softmax(T.dot(self.inpt, self.w)+self.b)
+        self.inpt = inpt.reshape((mini_batch_size, self.n_in))
+        self.output = softmax(T.dot(self.inpt, self.w) + self.b)
         self.y_out = T.argmax(self.output, axis=1)

     def accuracy(self, y):
@@ -307,3 +304,4 @@ def accuracy(self, y):
 def size(data):
     "Return the size of the dataset `data`."
     return data[0].get_value(borrow=True).shape[0]
+
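For readers skimming the diff: after this commit Network.__init__ wires the layers together using self.mini_batch_size, each layer's set_inpt no longer stores mini_batch_size on the layer, and FullyConnectedLayer.set_inpt computes its activation from the reshaped self.inpt rather than the raw inpt argument. Below is a minimal plain-NumPy sketch of that corrected wiring, assuming nothing beyond NumPy; the names NetworkSketch, FullyConnectedLayerSketch and the sigmoid helper are illustrative stand-ins, not identifiers from network3.py, which builds a Theano symbolic graph instead.

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

class FullyConnectedLayerSketch(object):
    def __init__(self, n_in, n_out):
        self.n_in, self.n_out = n_in, n_out
        # Gaussian weights with std 1/sqrt(n_out), as in the diff; zero biases for simplicity.
        self.w = np.random.normal(loc=0.0, scale=np.sqrt(1.0/n_out), size=(n_in, n_out))
        self.b = np.zeros(n_out)

    def set_inpt(self, inpt, mini_batch_size):
        # The fix: reshape first, then feed the reshaped self.inpt (not the raw inpt)
        # into the activation.
        self.inpt = inpt.reshape((mini_batch_size, self.n_in))
        self.output = sigmoid(np.dot(self.inpt, self.w) + self.b)

class NetworkSketch(object):
    def __init__(self, layers, mini_batch_size):
        self.layers = layers
        self.mini_batch_size = mini_batch_size
        x = np.random.rand(mini_batch_size, layers[0].n_in)  # stand-in for the symbolic self.x
        init_layer = self.layers[0]
        init_layer.set_inpt(x, self.mini_batch_size)
        for j in range(1, len(self.layers)):
            prev_layer, layer = self.layers[j-1], self.layers[j]
            layer.set_inpt(prev_layer.output, self.mini_batch_size)
        self.output = self.layers[-1].output

net = NetworkSketch([FullyConnectedLayerSketch(784, 100),
                     FullyConnectedLayerSketch(100, 10)],
                    mini_batch_size=20)
print(net.output.shape)  # (20, 10)

With the corrected wiring the second layer always receives the first layer's (mini_batch_size, 100) activations, so the final output has shape (mini_batch_size, 10).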
0 commit comments
