 from keras.preprocessing import sequence
 
 from utils4e import (Sigmoid, dot_product, softmax1D, conv1D, gaussian_kernel, element_wise_product, vector_add,
-                     random_weights, scalar_vector_product, matrix_multiplication, map_vector, mse_loss)
+                     random_weights, scalar_vector_product, matrix_multiplication, map_vector, mean_squared_error_loss)
 
 
 class Node:
     """
     A node in a computational graph contains the pointer to all its parents.
-    :param val: value of current node.
-    :param parents: a container of all parents of current node.
+    :param val: value of current node
+    :param parents: a container of all parents of current node
     """
 
     def __init__(self, val=None, parents=None):
@@ -55,40 +55,40 @@ def forward(self, inputs):
         raise NotImplementedError
 
 
-class OutputLayer(Layer):
-    """1D softmax output layer in 19.3.2"""
+class InputLayer(Layer):
+    """1D input layer. Layer size is the same as input vector size."""
 
     def __init__(self, size=3):
         super().__init__(size)
 
     def forward(self, inputs):
+        """Take each value of the inputs to each unit in the layer."""
         assert len(self.nodes) == len(inputs)
-        res = softmax1D(inputs)
-        for node, val in zip(self.nodes, res):
-            node.val = val
-        return res
+        for node, inp in zip(self.nodes, inputs):
+            node.val = inp
+        return inputs
 
 
-class InputLayer(Layer):
-    """1D input layer. Layer size is the same as input vector size."""
+class OutputLayer(Layer):
+    """1D softmax output layer in 19.3.2."""
 
     def __init__(self, size=3):
         super().__init__(size)
 
     def forward(self, inputs):
-        """Take each value of the inputs to each unit in the layer."""
         assert len(self.nodes) == len(inputs)
-        for node, inp in zip(self.nodes, inputs):
-            node.val = inp
-        return inputs
+        res = softmax1D(inputs)
+        for node, val in zip(self.nodes, res):
+            node.val = val
+        return res
 
 
 class DenseLayer(Layer):
     """
     1D dense layer in a neural network.
-    :param in_size: input vector size, int.
-    :param out_size: output vector size, int.
-    :param activation: activation function, Activation object.
+    :param in_size: (int) input vector size
+    :param out_size: (int) output vector size
+    :param activation: (Activation object) activation function
     """
 
     def __init__(self, in_size=3, out_size=3, activation=None):
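The hunk above only swaps the order of the two layer classes (and touches docstring punctuation); their behaviour is unchanged. A minimal sketch of how the two layers act on the same vector, assuming `softmax1D` normalizes a 1D list into probabilities as used above:

in_layer = InputLayer(size=3)
out_layer = OutputLayer(size=3)
in_layer.forward([1, 2, 3])           # returns [1, 2, 3]; values are stored on the nodes unchanged
probs = out_layer.forward([1, 2, 3])  # softmax1D output, roughly [0.09, 0.24, 0.67]
sum(probs)                            # ~1.0, a probability distribution over the output units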
@@ -124,7 +124,7 @@ def __init__(self, size=3, kernel_size=3):
             node.weights = gaussian_kernel(kernel_size)
 
     def forward(self, features):
-        # each node in layer takes a channel in the features.
+        # each node in layer takes a channel in the features
         assert len(self.nodes) == len(features)
         res = []
         # compute the convolution output of each channel, store it in node.val
@@ -154,7 +154,8 @@ def forward(self, features):
         for i in range(len(self.nodes)):
             feature = features[i]
             # get the max value in a kernel_size * kernel_size area
-            out = [max(feature[i:i + self.kernel_size]) for i in range(len(feature) - self.kernel_size + 1)]
+            out = [max(feature[i:i + self.kernel_size])
+                   for i in range(len(feature) - self.kernel_size + 1)]
             res.append(out)
             self.nodes[i].val = out
         return res
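The reflowed comprehension is a stride-1 sliding-window maximum over each 1D channel. A small illustration of what it computes:

feature, kernel_size = [1, 5, 2, 0, 3], 3
[max(feature[i:i + kernel_size]) for i in range(len(feature) - kernel_size + 1)]
# -> [5, 5, 3]: the max of windows [1, 5, 2], [5, 2, 0] and [2, 0, 3]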
@@ -270,13 +271,13 @@ def adam(dataset, net, loss, epochs=1000, rho=(0.9, 0.999), delta=1 / 10 ** 8,
 
 def BackPropagation(inputs, targets, theta, net, loss):
     """
-    The back-propagation algorithm for multilayer networks in only one epoch, to calculate gradients of theta
-    :param inputs: a batch of inputs in an array. Each input is an iterable object.
-    :param targets: a batch of targets in an array. Each target is an iterable object.
-    :param theta: parameters to be updated.
-    :param net: a list of predefined layer objects representing their linear sequence.
-    :param loss: a predefined loss function taking array of inputs and targets.
-    :return: gradients of theta, loss of the input batch.
+    The back-propagation algorithm for multilayer networks in only one epoch, to calculate gradients of theta.
+    :param inputs: a batch of inputs in an array. Each input is an iterable object
+    :param targets: a batch of targets in an array. Each target is an iterable object
+    :param theta: parameters to be updated
+    :param net: a list of predefined layer objects representing their linear sequence
+    :param loss: a predefined loss function taking array of inputs and targets
+    :return: gradients of theta, loss of the input batch
     """
 
     assert len(inputs) == len(targets)
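A hedged sketch of how BackPropagation is driven by the optimizers in this module. The layer sizes and the batch variable names are illustrative, and theta is assumed to be the per-layer list of node weights that the optimizers collect before each update:

net = [InputLayer(2), DenseLayer(2, 2), OutputLayer(2)]
theta = [[node.weights for node in layer.nodes] for layer in net]
# batch_inputs / batch_targets: equally long arrays of iterable examples (illustrative names)
gradients, batch_loss = BackPropagation(batch_inputs, batch_targets, theta, net,
                                        mean_squared_error_loss)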
@@ -325,9 +326,9 @@ def BackPropagation(inputs, targets, theta, net, loss):
 class BatchNormalizationLayer(Layer):
     """Batch normalization layer."""
 
-    def __init__(self, size, epsilon=0.001):
+    def __init__(self, size, eps=0.001):
         super().__init__(size)
-        self.epsilon = epsilon
+        self.eps = eps
         # self.weights = [beta, gamma]
         self.weights = [0, 0]
         self.inputs = None
@@ -341,7 +342,7 @@ def forward(self, inputs):
         res = []
         # get normalized value of each input
         for i in range(len(self.nodes)):
-            val = [(inputs[i] - mu) * self.weights[0] / np.sqrt(self.epsilon + stderr ** 2) + self.weights[1]]
+            val = [(inputs[i] - mu) * self.weights[0] / np.sqrt(self.eps + stderr ** 2) + self.weights[1]]
             res.append(val)
             self.nodes[i].val = val
         return res
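The renamed eps is the usual numerical-stability constant of batch normalization, y = gamma * (x - mu) / sqrt(sigma**2 + eps) + beta. A tiny worked example in the textbook form (hedged; not lifted from this exact implementation):

import numpy as np
x, mu, sigma, eps = 2.0, 1.0, 0.5, 0.001
gamma, beta = 1.0, 0.0
gamma * (x - mu) / np.sqrt(sigma ** 2 + eps) + beta  # ~2.0: x sits two standard deviations above the mean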
@@ -375,7 +376,7 @@ def NeuralNetLearner(dataset, hidden_layer_sizes=None, learning_rate=0.01, epoch
     raw_net.append(DenseLayer(hidden_input_size, output_size))
 
     # update parameters of the network
-    learned_net = optimizer(dataset, raw_net, mse_loss, epochs, l_rate=learning_rate,
+    learned_net = optimizer(dataset, raw_net, mean_squared_error_loss, epochs, l_rate=learning_rate,
                             batch_size=batch_size, verbose=verbose)
 
     def predict(example):
@@ -394,7 +395,7 @@ def predict(example):
     return predict
 
 
-def PerceptronLearner(dataset, learning_rate=0.01, epochs=100, verbose=None):
+def PerceptronLearner(dataset, learning_rate=0.01, epochs=100, optimizer=gradient_descent, batch_size=1, verbose=None):
     """
     Simple perceptron neural network.
     """
@@ -405,7 +406,8 @@ def PerceptronLearner(dataset, learning_rate=0.01, epochs=100, verbose=None):
     raw_net = [InputLayer(input_size), DenseLayer(input_size, output_size)]
 
     # update the network
-    learned_net = gradient_descent(dataset, raw_net, mse_loss, epochs, l_rate=learning_rate, verbose=verbose)
+    learned_net = optimizer(dataset, raw_net, mean_squared_error_loss, epochs, l_rate=learning_rate,
+                            batch_size=batch_size, verbose=verbose)
 
     def predict(example):
         layer_out = learned_net[1].forward(example)
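With the extended signature, the perceptron can now be trained with any optimizer in the module instead of plain gradient descent only. A hedged usage sketch (the iris DataSet and the example vector are illustrative):

iris = DataSet(name='iris')
perceptron = PerceptronLearner(iris, learning_rate=0.01, epochs=100,
                               optimizer=gradient_descent, batch_size=1)
perceptron([5.1, 3.5, 1.4, 0.2])  # index of the most activated output unit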
@@ -419,7 +421,7 @@ def SimpleRNNLearner(train_data, val_data, epochs=2):
     RNN example for text sentimental analysis.
     :param train_data: a tuple of (training data, targets)
             Training data: ndarray taking training examples, while each example is coded by embedding
-            Targets: ndarray taking targets of each example. Each target is mapped to an integer.
+            Targets: ndarray taking targets of each example. Each target is mapped to an integer
     :param val_data: a tuple of (validation data, targets)
     :param epochs: number of epochs
     :return: a keras model
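A hedged sketch of preparing the tuples this docstring describes, using the sequence module imported at the top of the file (the dataset variable names are illustrative):

# pad variable-length integer-coded examples to a common length
train_x = sequence.pad_sequences(train_examples, maxlen=500)
val_x = sequence.pad_sequences(val_examples, maxlen=500)
model = SimpleRNNLearner((train_x, train_targets), (val_x, val_targets), epochs=2)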