@@ -10,7 +10,7 @@
 """
 
 import network3
-from network3 import Network
+from network3 import sigmoid, tanh, ReLU, Network
 from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer
 training_data, validation_data, test_data = network3.load_data_shared()
 mini_batch_size = 10
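The import change above pulls network3's three activation functions into scope so they can be passed to layer constructors. For orientation, a minimal sketch of how such activations are typically defined on top of Theano; the exact definitions are an assumption about network3's internals, not part of this commit:

    # Assumed shape of network3's activation functions (Theano elementwise ops).
    import theano.tensor as T
    from theano.tensor.nnet import sigmoid  # sigmoid(z) = 1 / (1 + exp(-z))
    from theano.tensor import tanh          # hyperbolic tangent

    def ReLU(z):
        # Rectified linear unit: elementwise max(0, z).
        return T.maximum(0.0, z)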
@@ -44,17 +44,20 @@ def omit_FC():
             SoftmaxLayer(n_in=20*12*12, n_out=10)], mini_batch_size)
         net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data, test_data)
 
-def dbl_conv():
+def dbl_conv(activation_fn=sigmoid):
     for j in range(3):
         print "Conv + Conv + FC architecture"
         net = Network([
             ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
                           filter_shape=(20, 1, 5, 5),
-                          poolsize=(2, 2)),
+                          poolsize=(2, 2),
+                          activation_fn=activation_fn),
             ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
                           filter_shape=(40, 20, 5, 5),
-                          poolsize=(2, 2)),
-            FullyConnectedLayer(n_in=40*4*4, n_out=100),
+                          poolsize=(2, 2),
+                          activation_fn=activation_fn),
+            FullyConnectedLayer(
+                n_in=40*4*4, n_out=100, activation_fn=activation_fn),
             SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
         net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data, test_data)
 
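With activation_fn now a parameter (defaulting to sigmoid, so existing callers get exactly the old behavior), one function covers all three activations. A hypothetical driver, assuming the imports in the first hunk:

    dbl_conv()                      # sigmoid, identical to the old dbl_conv()
    dbl_conv(activation_fn=tanh)    # reproduces dbl_conv_tanh(), removed below
    dbl_conv(activation_fn=ReLU)    # ReLU variant of the same architecture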
@@ -73,24 +76,8 @@ def regularized_dbl_conv():
                 SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
             net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data, test_data, lmbda=lmbda)
 
-def dbl_conv_tanh():
-    for j in range(3):
-        print "Conv + Conv + FC, using tanh, trial %s" % j
-        net = Network([
-            ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
-                          filter_shape=(20, 1, 5, 5),
-                          poolsize=(2, 2),
-                          activation_fn=tanh),
-            ConvPoolLayer(image_shape=(mini_batch_size, 20, 12, 12),
-                          filter_shape=(40, 20, 5, 5),
-                          poolsize=(2, 2),
-                          activation_fn=tanh),
-            FullyConnectedLayer(n_in=40*4*4, n_out=100, activation_fn=tanh),
-            SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
-        net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data, test_data)
-
 def dbl_conv_relu():
-    for lmbda in [0.00001, 0.0001, 0.001, 0.01, 0.1, 1.0, 10.0, 100.0]:
+    for lmbda in [0.0, 0.00001, 0.0001, 0.001, 0.01, 0.1, 1.0, 10.0, 100.0]:
         for j in range(3):
             print "Conv + Conv + FC num %s, relu, with regularization %s" % (j, lmbda)
             net = Network([
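Note that prepending 0.0 to the sweep adds an unregularized baseline, so dbl_conv_relu now performs 9 values x 3 trials = 27 training runs.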
@@ -126,5 +113,5 @@ def expanded_data():
             FullyConnectedLayer(n_in=40*4*4, n_out=100, activation_fn=ReLU),
             SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
         net.SGD(expanded_training_data, 20, mini_batch_size, 0.03,
-                validation_data, test_data, lmbda=1.0)
+                validation_data, test_data, lmbda=0.1)
 
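The expanded-data run now uses a ten-times-weaker L2 penalty (lmbda 1.0 -> 0.1), consistent with needing less regularization once the training set is artificially enlarged. For context, a sketch of how an L2 penalty of strength lmbda typically enters a per-batch cost; this paraphrases network3's Network.SGD and is an assumption, not part of this diff:

    def regularized_cost(base_cost, weights, lmbda, num_training_batches):
        # weights: the network's weight tensors (numpy arrays or Theano vars).
        l2_norm_squared = sum((w ** 2).sum() for w in weights)
        return base_cost + 0.5 * lmbda * l2_norm_squared / num_training_batches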