@@ -134,9 +134,7 @@ def enet_coordinate_descent(np.ndarray[DOUBLE, ndim=1] w,
134134
135135 We minimize
136136
137- 1 norm(y - X w, 2)^2 + alpha norm(w, 1) + beta norm(w, 2)^2
138- - ----
139- 2 2
137+ (1/2) * norm(y - X w, 2)^2 + alpha norm(w, 1) + (beta/2) * norm(w, 2)^2
140138
141139 """
142140
@@ -299,9 +297,7 @@ def sparse_enet_coordinate_descent(double[:] w,
299297
300298 We minimize:
301299
302- 1 norm(y - X w, 2)^2 + alpha norm(w, 1) + beta norm(w, 2)^2
303- - ----
304- 2 2
300+ (1/2) * norm(y - X w, 2)^2 + alpha norm(w, 1) + (beta/2) * norm(w, 2)^2
305301
306302 """
307303
@@ -498,9 +494,7 @@ def enet_coordinate_descent_gram(double[:] w, double alpha, double beta,
498494
499495 We minimize
500496
501- 1 w^T Q w - q^T w + alpha norm(w, 1) + beta norm(w, 2)^2
502- - ----
503- 2 2
497+ (1/2) * w^T Q w - q^T w + alpha norm(w, 1) + (beta/2) * norm(w, 2)^2
504498
505499 which amount to the Elastic-Net problem when:
506500 Q = X^T X (Gram matrix)
@@ -640,9 +634,7 @@ def enet_coordinate_descent_multi_task(double[::1, :] W, double l1_reg,
640634
641635 We minimize
642636
643- 1 norm(y - X w, 2)^2 + l1_reg ||w||_21 + l2_reg norm(w, 2)^2
644- - ----
645- 2 2
637+ (1/2) * norm(y - X w, 2)^2 + l1_reg ||w||_21 + (l2_reg/2) * norm(w, 2)^2
646638
647639 """
648640 # get the data information into easy vars
0 commit comments