@@ -241,8 +241,6 @@ def line_search(self, X, y, sample_weight):
 
         # np.sum(np.abs(self.gradient_old))
         sum_abs_grad_old = -1
-        sum_abs_grad_previous = -1  # Used to track sum|gradients| of i - 1
-        has_improved_sum_abs_grad_previous = False
 
         is_verbose = self.verbose >= 2
         if is_verbose:
@@ -298,52 +296,6 @@ def line_search(self, X, y, sample_weight):
                     )
                 if check:
                     break
-                # 2.2 Deal with relative gradient differences around machine precision.
-                tiny_grad = sum_abs_grad_old * eps
-                abs_grad_improvement = np.abs(sum_abs_grad - sum_abs_grad_old)
-                check = abs_grad_improvement <= tiny_grad
-                if is_verbose:
-                    print(
-                        "      check |sum(|gradient|) - sum(|gradient_old|)| <= eps * "
-                        "sum(|gradient_old|):"
-                        f" {abs_grad_improvement} <= {tiny_grad} {check}"
-                    )
-                if check:
-                    break
-                # 2.3 This is really the last resort.
-                # Check that sum(|gradient_{i-1}|) < sum(|gradient_{i-2}|)
-                #   = has_improved_sum_abs_grad_previous
-                # If now sum(|gradient_{i}|) >= sum(|gradient_{i-1}|), this iteration
-                # made things worse and we should have stopped at i-1.
-                check = (
-                    has_improved_sum_abs_grad_previous
-                    and sum_abs_grad >= sum_abs_grad_previous
-                )
-                if is_verbose:
-                    print(
-                        "      check if previously "
-                        f"sum(|gradient_{i-1}|) < sum(|gradient_{i-2}|) but now "
-                        f"sum(|gradient_{i}|) >= sum(|gradient_{i-1}|) {check}"
-                    )
-                if check:
-                    t /= beta  # we go back to i-1
-                    self.coef = self.coef_old + t * self.coef_newton
-                    raw = self.raw_prediction + t * raw_prediction_newton
-                    self.loss_value, self.gradient = self.linear_loss.loss_gradient(
-                        coef=self.coef,
-                        X=X,
-                        y=y,
-                        sample_weight=sample_weight,
-                        l2_reg_strength=self.l2_reg_strength,
-                        n_threads=self.n_threads,
-                        raw_prediction=raw,
-                    )
-                    break
-                # Calculate for the next iteration
-                has_improved_sum_abs_grad_previous = (
-                    sum_abs_grad < sum_abs_grad_previous
-                )
-                sum_abs_grad_previous = sum_abs_grad
 
             t *= beta
         else:
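
The checks 2.2 and 2.3 removed above were extra fallbacks layered on a standard Armijo backtracking loop: halve the step size `t` until the loss decreases sufficiently, and when the loss difference sits at machine precision, fall back to comparing sums of absolute gradients (the surviving check 2.1). Below is a minimal, self-contained sketch of that core loop; `armijo_backtrack`, `f`, and `grad` are illustrative names with illustrative default constants, not scikit-learn's API.

```python
import numpy as np


def armijo_backtrack(f, grad, x, direction, beta=0.5, sigma=2**-11, max_iter=21):
    """Halve the step size t until f decreases enough along `direction`.

    `f` maps a vector to a scalar loss; `grad` returns its gradient.
    """
    eps = 16 * np.finfo(np.float64).eps
    loss_old = f(x)
    g_old = grad(x)
    # Expected decrease for a full step along a descent direction.
    armijo = -sigma * (g_old @ direction)
    sum_abs_grad_old = np.sum(np.abs(g_old))
    t = 1.0
    for _ in range(max_iter):
        loss = f(x + t * direction)
        # 1. Armijo condition: sufficient decrease relative to step size.
        if loss_old - loss >= t * armijo:
            return t
        # 2. Loss difference at machine precision: fall back to comparing
        #    sums of absolute gradients (check 2.1 in the code above).
        if abs(loss_old - loss) <= eps * abs(loss_old):
            if np.sum(np.abs(grad(x + t * direction))) < sum_abs_grad_old:
                return t
        t *= beta
    return t  # smallest step tried; the real solver warns in this case


# Example: for a quadratic loss, the Newton direction -grad(x) passes the
# Armijo check with the full step t == 1.0 on the first iteration.
f = lambda x: 0.5 * x @ x
grad = lambda x: x
x0 = np.array([3.0, -4.0])
assert armijo_backtrack(f, grad, x0, direction=-grad(x0)) == 1.0
```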