@@ -111,10 +111,10 @@ def __init__(self, layers, mini_batch_size):
         self.x = T.matrix("x")
         self.y = T.ivector("y")
         init_layer = self.layers[0]
-        init_layer.set_inpt(self.x, mini_batch_size)
+        init_layer.set_inpt(self.x, self.mini_batch_size)
         for j in xrange(1, len(self.layers)):
             prev_layer, layer = self.layers[j-1], self.layers[j]
-            layer.set_inpt(prev_layer.output, mini_batch_size)
+            layer.set_inpt(prev_layer.output, self.mini_batch_size)
         self.output = self.layers[-1].output

     def SGD(self, training_data, epochs, mini_batch_size, eta,
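(Aside: the loop in this hunk threads each layer's output into the next layer's set_inpt. A minimal, Theano-free sketch of that chaining pattern — the ToyLayer class here is hypothetical, purely for illustration:)

    class ToyLayer(object):
        """Stand-in layer: set_inpt stores an input and exposes an output."""
        def set_inpt(self, inpt, mini_batch_size):
            self.output = inpt + 1  # placeholder transform

    layers = [ToyLayer(), ToyLayer(), ToyLayer()]
    layers[0].set_inpt(0, mini_batch_size=10)
    for j in range(1, len(layers)):
        prev_layer, layer = layers[j-1], layers[j]
        layer.set_inpt(prev_layer.output, mini_batch_size=10)
    print(layers[-1].output)  # -> 3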
@@ -261,8 +261,7 @@ def __init__(self, n_in, n_out, activation_fn=sigmoid):
         # Initialize weights and biases
         self.w = theano.shared(
             np.asarray(
-                np.random.normal(
-                    loc=0.0, scale=np.sqrt(1.0/n_out), size=(n_in, n_out)),
+                np.random.normal(loc=0.0, scale=np.sqrt(1.0/n_out), size=(n_in, n_out)),
                 dtype=theano.config.floatX),
             name='w', borrow=True)
         self.b = theano.shared(
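(Aside: the shared variable above draws each initial weight from a Gaussian with mean 0 and standard deviation sqrt(1/n_out). A numpy-only sketch of that initialization, with illustrative layer sizes not taken from the diff:)

    import numpy as np

    n_in, n_out = 784, 30  # illustrative sizes
    w = np.asarray(
        np.random.normal(loc=0.0, scale=np.sqrt(1.0/n_out), size=(n_in, n_out)),
        dtype='float32')  # theano.config.floatX commonly resolves to float32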
@@ -272,9 +271,8 @@ def __init__(self, n_in, n_out, activation_fn=sigmoid):
         self.params = [self.w, self.b]

     def set_inpt(self, inpt, mini_batch_size):
-        self.mini_batch_size = mini_batch_size
-        self.inpt = inpt.reshape((self.mini_batch_size, self.n_in))
-        self.output = self.activation_fn(T.dot(inpt, self.w)+self.b)
+        self.inpt = inpt.reshape((mini_batch_size, self.n_in))
+        self.output = self.activation_fn(T.dot(self.inpt, self.w) + self.b)


 class SoftmaxLayer():
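(Aside: besides dropping the redundant self.mini_batch_size attribute, the hunk above fixes a subtle bug — the old code dotted the raw inpt with self.w instead of the reshaped self.inpt. A numpy analogue of why the reshape has to happen first; all shapes here are assumed for illustration:)

    import numpy as np

    mini_batch_size, n_in, n_out = 10, 784, 30
    inpt = np.random.rand(mini_batch_size, 28, 28)    # e.g. a batch of images
    w = np.random.rand(n_in, n_out)
    b = np.random.rand(n_out)

    x = inpt.reshape((mini_batch_size, n_in))         # flatten each example
    output = 1.0/(1.0 + np.exp(-(np.dot(x, w) + b)))  # sigmoid(x.w + b)
    # np.dot(inpt, w) would raise: (10, 28, 28) does not align with (784, 30)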
@@ -293,9 +291,8 @@ def __init__(self, n_in, n_out):
         self.params = [self.w, self.b]

     def set_inpt(self, inpt, mini_batch_size):
-        self.mini_batch_size = mini_batch_size
-        self.inpt = inpt.reshape((self.mini_batch_size, self.n_in))
-        self.output = softmax(T.dot(self.inpt, self.w)+self.b)
+        self.inpt = inpt.reshape((mini_batch_size, self.n_in))
+        self.output = softmax(T.dot(self.inpt, self.w) + self.b)
         self.y_out = T.argmax(self.output, axis=1)

     def accuracy(self, y):
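(Aside: softmax normalizes each row of activations into a probability distribution, and the argmax along axis 1 gives the predicted class per example. A numpy sketch with made-up activations:)

    import numpy as np

    z = np.array([[1.0, 2.0, 0.5],
                  [0.1, 0.1, 3.0]])               # (mini_batch_size, n_out)
    e = np.exp(z - z.max(axis=1, keepdims=True))  # subtract row max for stability
    output = e / e.sum(axis=1, keepdims=True)     # each row now sums to 1
    y_out = np.argmax(output, axis=1)             # predicted classes: [1, 2]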
@@ -307,3 +304,4 @@ def accuracy(self, y):
 def size(data):
     "Return the size of the dataset `data`."
     return data[0].get_value(borrow=True).shape[0]
+
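(Aside: size expects data to be a (shared_x, shared_y) pair of Theano shared variables and reports the number of examples. A hedged usage sketch — the dataset shape and contents below are made up:)

    import numpy as np
    import theano

    shared_x = theano.shared(
        np.zeros((50000, 784), dtype=theano.config.floatX), borrow=True)
    shared_y = theano.shared(np.zeros(50000, dtype='int32'), borrow=True)
    print(size((shared_x, shared_y)))  # -> 50000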