@@ -55,30 +55,6 @@ def ReLU(z): return T.maximum(0, z)
 except: pass # it's already set
 theano.config.floatX = 'float32'
 
-def example(mini_batch_size=10):
-    print("Loading the MNIST data")
-    training_data, validation_data, test_data = load_data_shared()
-    print("Building the network")
-    net = create_net(10)
-    print("Training the network")
-    try:
-        net.SGD(training_data, 200, mini_batch_size, 0.1,
-                validation_data, test_data, lmbda=1.0)
-    except KeyboardInterrupt:
-        pass
-    return net
-
-def create_net(mini_batch_size=10, activation_fn=tanh):
-    return Network(
-        [ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28), filter_shape=(20, 1, 5, 5), poolsize=(2, 2), activation_fn=activation_fn),
-         #ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12), filter_shape=(40, 20, 5, 5), poolsize=(2, 2), activation_fn=activation_fn),
-         #FullyConnectedLayer(n_in=40*4*4, n_out=100, mini_batch_size=mini_batch_size, activation_fn=activation_fn),
-         #FullyConnectedLayer(n_in=784, n_out=100, mini_batch_size=mini_batch_size, activation_fn=activation_fn),
-         FullyConnectedLayer(n_in=20*12*12, n_out=100),
-         #FullyConnectedLayer(n_in=100, n_out=100, mini_batch_size=mini_batch_size, activation_fn=activation_fn),
-         SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
-         #SoftmaxLayer(n_in=20*12*12, n_out=10)], mini_batch_size)
-
 #### Load the MNIST data
 def load_data_shared(filename="../data/mnist.pkl.gz"):
     f = gzip.open(filename, 'rb')
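
Note: this commit removes the example() and create_net() driver helpers. For
anyone who was calling them, the sketch below shows the equivalent call
sequence written out by hand. It is a minimal sketch, not part of the commit:
it assumes the file is importable as a module named network3 (an illustrative
name) and that tanh resolves to theano.tensor.tanh, as the deleted
create_net() signature implied.

    # Minimal stand-in for the deleted example()/create_net() helpers.
    # Module name "network3" is assumed for illustration.
    from network3 import (Network, ConvPoolLayer, FullyConnectedLayer,
                          SoftmaxLayer, load_data_shared)
    from theano.tensor import tanh

    mini_batch_size = 10
    training_data, validation_data, test_data = load_data_shared()

    # The same architecture the deleted create_net() built: one conv-pool
    # layer, one fully connected layer, and a 10-way softmax output.
    net = Network(
        [ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                       filter_shape=(20, 1, 5, 5), poolsize=(2, 2),
                       activation_fn=tanh),
         FullyConnectedLayer(n_in=20*12*12, n_out=100),
         SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)

    # The same hyperparameters the deleted example() used: 200 epochs,
    # learning rate 0.1, L2 regularization lmbda=1.0.
    net.SGD(training_data, 200, mini_batch_size, 0.1,
            validation_data, test_data, lmbda=1.0)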