@@ -526,6 +526,7 @@ def markov_blanket_sample(X, e, bn):
526526
527527# Umbrella Example [Fig. 15.2] 
528528
529+ 
529530class  HiddenMarkovModel :
530531
531532    """ A Hidden markov model which takes Transition model and Sensor model as inputs""" 
@@ -546,17 +547,18 @@ def sensor_dist(self, ev):
546547
def forward(HMM, fv, ev):
    """One step of the forward algorithm for a two-state HMM: propagate
    the prior belief fv through the transition model, then condition on
    the current evidence ev and renormalize."""
    # Weighted sum of the two transition rows gives the one-step prediction.
    weighted_rows = [scalar_vector_product(fv[state], HMM.transition_model[state])
                     for state in (0, 1)]
    prediction = vector_add(weighted_rows[0], weighted_rows[1])
    # Fold in the evidence likelihood and renormalize.
    likelihood = HMM.sensor_dist(ev)
    return normalize(element_wise_product(likelihood, prediction))
553554
555+ 
def backward(HMM, b, ev):
    """One step of the backward recursion for a two-state HMM: condition
    the backward message b on evidence ev, then run it backwards through
    the transition model and renormalize."""
    likelihood = HMM.sensor_dist(ev)
    message = element_wise_product(likelihood, b)
    weighted_rows = [scalar_vector_product(message[state], HMM.transition_model[state])
                     for state in (0, 1)]
    return normalize(vector_add(weighted_rows[0], weighted_rows[1]))
560562
561563
562564def  forward_backward (HMM , ev , prior ):
@@ -571,7 +573,8 @@ def forward_backward(HMM, ev, prior):
571573    umbrellaHMM = HiddenMarkovModel(umbrella_transition, umbrella_sensor) 
572574
573575    >>> forward_backward(umbrellaHMM, umbrella_evidence, umbrella_prior) 
574-     [[0.6469, 0.3531], [0.8673, 0.1327], [0.8204, 0.1796], [0.3075, 0.6925], [0.8204, 0.1796], [0.8673, 0.1327]] 
576+     [[0.6469, 0.3531], [0.8673, 0.1327], [0.8204, 0.1796], 
577+      [0.3075, 0.6925], [0.8204, 0.1796], [0.8673, 0.1327]] 
575578    """ 
576579    t  =  len (ev )
577580    ev .insert (0 , None )  # to make the code look similar to pseudo code 
@@ -583,10 +586,10 @@ def forward_backward(HMM, ev, prior):
583586
584587    fv [0 ] =  prior 
585588
586-     for  i  in  range (1 , t +  1 ):
587-         fv [i ] =  forward (HMM , fv [i -  1 ], ev [i ])
589+     for  i  in  range (1 , t   +  1 ):
590+         fv [i ] =  forward (HMM , fv [i   -  1 ], ev [i ])
588591    for  i  in  range (t , - 1 , - 1 ):
589-         sv [i -  1 ] =  normalize (element_wise_product (fv [i ], b ))
592+         sv [i   -  1 ] =  normalize (element_wise_product (fv [i ], b ))
590593        b  =  backward (HMM , b , ev [i ])
591594        bv .append (b )
592595
@@ -600,14 +603,78 @@ def forward_backward(HMM, ev, prior):
600603
601604# _________________________________________________________________________ 
602605
606+ 
def fixed_lag_smoothing(e_t, hmm, d):
    """Smoothing with a fixed time lag of d steps. [Fig. 15.6]

    Placeholder — not yet implemented; delegates to the module-level
    unimplemented() helper."""
    unimplemented()
606610
607611
def particle_filtering(e, N, HMM):
    """Particle filtering over the two state variables ('A'/'B') of a
    Hidden Markov Model, for a single time step.

    e is the evidence value for the current step, N the number of
    particles, and HMM a HiddenMarkovModel supplying the transition and
    sensor models.  Returns a list of sampled states ('A' or 'B').

    N = 10
    umbrella_evidence = T
    umbrella_prior = [0.5, 0.5]
    umbrella_transition = [[0.7, 0.3], [0.3, 0.7]]
    umbrella_sensor = [[0.9, 0.2], [0.1, 0.8]]
    umbrellaHMM = HiddenMarkovModel(umbrella_transition, umbrella_sensor)

    >>> particle_filtering(umbrella_evidence, N, umbrellaHMM)
    ['A', 'A', 'A', 'B', 'A', 'A', 'B', 'A', 'A', 'A', 'B']

    NOTE: the output is sampled, therefore it can vary between runs.
    """
    # Prior belief over the two states.  (The original code also sampled
    # an initial particle set here, but it was overwritten unused below,
    # so that dead code has been removed.)
    dist = [0.5, 0.5]
    # STEP 1: propagate the belief one step through the transition model.
    dist = vector_add(scalar_vector_product(dist[0], HMM.transition_model[0]),
                      scalar_vector_product(dist[1], HMM.transition_model[1]))
    # Sample a state for each particle from the propagated distribution.
    s = ['A' if probability(dist[0]) else 'B' for _ in range(N)]
    # Importance weight of particle i given evidence e: P(e|state)*P(state).
    # Hoist the sensor distribution out of the loop — it does not depend on i.
    sensor = HMM.sensor_dist(e)
    w = [0] * N
    w_tot = 0
    for i in range(N):
        state = 0 if s[i] == 'A' else 1
        w[i] = sensor[state] * dist[state]
        w_tot += w[i]
    # Normalize the weights, limiting them to 4 decimal digits as before.
    w = [float("{0:.4f}".format(w_i / w_tot)) for w_i in w]
    # STEP 2: resample particles in proportion to their weights.
    return weighted_sample_with_replacement(N, s, w)
662+ 
663+ 
def weighted_sample_with_replacement(N, s, w):
    """Resample N particles from s with replacement, in proportion to the
    weights w.

    Uses rejection sampling: an index is drawn uniformly at random and
    accepted with probability equal to its weight, until N states have
    been collected.

    Fix: the original loop condition ``while cnt <= N`` was an off-by-one
    that produced N + 1 samples instead of N.
    """
    s_wtd = []
    while len(s_wtd) < N:
        # Draw a candidate index uniformly from 0 .. N-1.
        i = random.randint(0, N - 1)
        # Accept the candidate with probability equal to its weight.
        if probability(w[i]):
            s_wtd.append(s[i])
    return s_wtd
611678
612679# _________________________________________________________________________ 
613680__doc__  +=  """ 
0 commit comments