@@ -73,8 +73,8 @@ def phi(alpha1):
7373 return alpha , fc [0 ], phi1
7474
7575
76- def do_linesearch (cost , G , deltaG , Mi , f_val ,
77- armijo = True , C1 = None , C2 = None , reg = None , Gc = None , constC = None , M = None ):
76+ def solve_linesearch (cost , G , deltaG , Mi , f_val ,
77+ armijo = True , C1 = None , C2 = None , reg = None , Gc = None , constC = None , M = None ):
7878 """
7979 Solve the linesearch in the FW iterations
8080 Parameters
@@ -93,17 +93,17 @@ def do_linesearch(cost, G, deltaG, Mi, f_val,
9393 If True the steps of the line-search is found via an armijo research. Else closed form is used.
9494 If there is convergence issues use False.
9595 C1 : ndarray (ns,ns), optional
96- Structure matrix in the source domain. Only used when armijo=False
96+ Structure matrix in the source domain. Only used and necessary when armijo=False
9797 C2 : ndarray (nt,nt), optional
98- Structure matrix in the target domain. Only used when armijo=False
98+ Structure matrix in the target domain. Only used and necessary when armijo=False
9999 reg : float, optional
100- Regularization parameter. Only used when armijo=False
100+ Regularization parameter. Only used and necessary when armijo=False
101101 Gc : ndarray (ns,nt)
102- Optimal map found by linearization in the FW algorithm. Only used when armijo=False
102+ Optimal map found by linearization in the FW algorithm. Only used and necessary when armijo=False
103103 constC : ndarray (ns,nt)
104- Constant for the gromov cost. See [24]. Only used when armijo=False
104+ Constant for the gromov cost. See [24]. Only used and necessary when armijo=False
105105 M : ndarray (ns,nt), optional
106- Cost matrix between the features. Only used when armijo=False
106+ Cost matrix between the features. Only used and necessary when armijo=False
107107 Returns
108108 -------
109109 alpha : float
@@ -128,7 +128,7 @@ def do_linesearch(cost, G, deltaG, Mi, f_val,
128128 b = np .sum ((M + reg * constC ) * deltaG ) - 2 * reg * (np .sum (dot12 * G ) + np .sum (np .dot (C1 , G ).dot (C2 ) * deltaG ))
129129 c = cost (G )
130130
131- alpha = solve_1d_linesearch_quad_funct (a , b , c )
131+ alpha = solve_1d_linesearch_quad (a , b , c )
132132 fc = None
133133 f_val = cost (G + alpha * deltaG )
134134
@@ -181,7 +181,7 @@ def cg(a, b, M, reg, f, df, G0=None, numItermax=200,
181181 Print information along iterations
182182 log : bool, optional
183183 record log if True
184- kwargs : dict
184+ ** kwargs : dict
185185 Parameters for linesearch
186186
187187 Returns
@@ -244,7 +244,7 @@ def cost(G):
244244 deltaG = Gc - G
245245
246246 # line search
247- alpha , fc , f_val = do_linesearch (cost , G , deltaG , Mi , f_val , reg = reg , M = M , Gc = Gc , ** kwargs )
247+ alpha , fc , f_val = solve_linesearch (cost , G , deltaG , Mi , f_val , reg = reg , M = M , Gc = Gc , ** kwargs )
248248
249249 G = G + alpha * deltaG
250250
@@ -254,7 +254,7 @@ def cost(G):
254254
255255 abs_delta_fval = abs (f_val - old_fval )
256256 relative_delta_fval = abs_delta_fval / abs (f_val )
257- if relative_delta_fval < stopThr and abs_delta_fval < stopThr2 :
257+ if relative_delta_fval < stopThr or abs_delta_fval < stopThr2 :
258258 loop = 0
259259
260260 if log :
@@ -395,7 +395,7 @@ def cost(G):
395395 abs_delta_fval = abs (f_val - old_fval )
396396 relative_delta_fval = abs_delta_fval / abs (f_val )
397397
398- if relative_delta_fval < stopThr and abs_delta_fval < stopThr2 :
398+ if relative_delta_fval < stopThr or abs_delta_fval < stopThr2 :
399399 loop = 0
400400
401401 if log :
@@ -413,11 +413,11 @@ def cost(G):
413413 return G
414414
415415
416- def solve_1d_linesearch_quad_funct (a , b , c ):
416+ def solve_1d_linesearch_quad (a , b , c ):
417417 """
418- Solve on 0,1 the following problem:
418+ For any convex or non-convex 1d quadratic function f, solve on [0, 1] the following problem:
419419 .. math::
420- \min f(x)=a*x^{2}+b*x+c
420+ \argmin f(x)=a*x^{2}+b*x+c
421421
422422 Parameters
423423 ----------
0 commit comments