Commit 9bcaa8b

Fixing the basic relu results, adding the expanded data

1 parent 382750d

1 file changed (+24, -18 lines)

src/conv.py (24 additions, 18 deletions)
@@ -89,33 +89,39 @@ def dbl_conv_tanh():
             SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
         net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data, test_data)
 
-def dbl_conv_relu():
-    for j in range(3):
-        print "Conv + Conv + FC, using ReLU and regularization"
-        net = Network([
-            ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
-                          filter_shape=(20, 1, 5, 5),
-                          poolsize=(2, 2),
-                          activation_fn=tanh),
-            ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
-                          filter_shape=(40, 20, 5, 5),
-                          poolsize=(2, 2),
-                          activation_fn=tanh),
-            FullyConnectedLayer(n_in=40*4*4, n_out=100, activation_fn=tanh),
-            SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
-        net.SGD(training_data, 60, mini_batch_size, 0.03, validation_data, test_data, lmbda=1.0)
-
 def dbl_conv_relu():
     for lmbda in [0.00001, 0.0001, 0.001, 0.01, 0.1, 1.0, 10.0, 100.0]:
         for j in range(3):
             print "Conv + Conv + FC num %s, relu, with regularization %s" % (j, lmbda)
             net = Network([
                 ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                               filter_shape=(20, 1, 5, 5),
-                              poolsize=(2, 2), activation_fn=ReLU),
+                              poolsize=(2, 2),
+                              activation_fn=ReLU),
                 ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                               filter_shape=(40, 20, 5, 5),
-                              poolsize=(2, 2), activation_fn=ReLU),
+                              poolsize=(2, 2),
+                              activation_fn=ReLU),
                 FullyConnectedLayer(n_in=40*4*4, n_out=100, activation_fn=ReLU),
                 SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
             net.SGD(training_data, 60, mini_batch_size, 0.03, validation_data, test_data, lmbda=lmbda)
+
+def expanded_data():
+    expanded_training_data, _, _ = network3.load_data_shared(
+        "../data/mnist_expanded.pkl.gz")
+    for j in range(3):
+        print "Training with expanded data, run num %s" % j
+        net = Network([
+            ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
+                          filter_shape=(20, 1, 5, 5),
+                          poolsize=(2, 2),
+                          activation_fn=ReLU),
+            ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
+                          filter_shape=(40, 20, 5, 5),
+                          poolsize=(2, 2),
+                          activation_fn=ReLU),
+            FullyConnectedLayer(n_in=40*4*4, n_out=100, activation_fn=ReLU),
+            SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
+        net.SGD(expanded_training_data, 20, mini_batch_size, 0.03,
+                validation_data, test_data, lmbda=1.0)
+

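For reference, both experiment functions in this hunk depend on module-level setup in src/conv.py that lies outside the diff. A minimal usage sketch, assuming that setup follows the repository's usual pattern (shared MNIST data loaded via network3.load_data_shared and a mini-batch size of 10; the exact values in conv.py may differ):

import network3
from network3 import Network, ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer, ReLU

# Assumed module-level globals referenced by dbl_conv_relu() and expanded_data();
# load_data_shared() reads the standard ../data/mnist.pkl.gz by default.
training_data, validation_data, test_data = network3.load_data_shared()
mini_batch_size = 10

dbl_conv_relu()    # sweeps lmbda over eight values, three trials each, 60 epochs per trial
expanded_data()    # three trials of 20 epochs on the expanded training set

Each call builds a fresh Network and trains it with SGD, so the trials are independent runs rather than continued training.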
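The expanded_data() run reads ../data/mnist_expanded.pkl.gz, which this commit does not create. The sketch below shows one way such a file can be produced: displacing each of the 50,000 training images by one pixel up, down, left and right, growing the training set to 250,000 images. It is an illustration of the idea in Python 2 (matching the era of network3.py), not necessarily the repository's own expansion script, and the intermediate names are made up for the example.

import cPickle
import gzip
import numpy as np

# Load the standard MNIST pickle: an (images, labels) pair for each split.
with gzip.open("../data/mnist.pkl.gz", "rb") as f:
    training_data, validation_data, test_data = cPickle.load(f)

expanded_pairs = []
for x, y in zip(training_data[0], training_data[1]):
    expanded_pairs.append((x, y))
    image = np.reshape(x, (28, 28))
    # One-pixel shifts in each of the four directions, zeroing out the
    # row or column that np.roll wraps around.
    for axis, shift in [(0, 1), (0, -1), (1, 1), (1, -1)]:
        shifted = np.roll(image, shift, axis=axis)
        if axis == 0:
            shifted[0 if shift == 1 else -1, :] = 0.0
        else:
            shifted[:, 0 if shift == 1 else -1] = 0.0
        expanded_pairs.append((np.reshape(shifted, 784), y))

np.random.shuffle(expanded_pairs)
expanded_x, expanded_y = zip(*expanded_pairs)
expanded_training_data = (np.asarray(expanded_x), np.asarray(expanded_y))

# Write the result where expanded_data() expects to find it.
with gzip.open("../data/mnist_expanded.pkl.gz", "wb") as f:
    cPickle.dump((expanded_training_data, validation_data, test_data), f)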