popt.update_schemes.linesearch
```python
# External imports
import numpy as np
import time
import pprint
import warnings

from numpy import linalg as la
from scipy.optimize import line_search

# Internal imports
from popt.misc_tools import optim_tools as ot
from popt.loop.optimize import Optimize

# Ignore the "line search did not converge" message from scipy
warnings.filterwarnings('ignore', message='The line search algorithm did not converge')


class LineSearch(Optimize):

    def __init__(self, fun, x, args, jac, hess, bounds=None, **options):

        # Init the parent class (PETEnsemble)
        super(LineSearch, self).__init__(**options)

        def __set_variable(var_name=None, default=None):
            if var_name in options:
                return options[var_name]
            else:
                return default

        # Set input as class variables
        self.options = options      # options
        self.fun = fun              # objective function
        self.cov = args[0]          # initial covariance
        self.jac = jac              # gradient function
        self.hess = hess            # hessian function
        self.bounds = bounds        # parameter bounds
        self.mean_state = x         # initial mean state
        self.pk_from_ls = None

        # Set other optimization parameters
        self.alpha_iter_max = __set_variable('alpha_maxiter', 5)
        self.alpha_cov = __set_variable('alpha_cov', 0.001)
        self.normalize = __set_variable('normalize', True)
        self.max_resample = __set_variable('resample', 0)
        self.cov_factor = __set_variable('cov_factor', 0.5)
        self.alpha = 0.0

        # Initialize line-search parameters (scipy defaults for c1 and c2)
        self.alpha_max = __set_variable('alpha_max', 1.0)
        self.ls_options = {'c1': __set_variable('c1', 0.0001),
                           'c2': __set_variable('c2', 0.9)}

        # Calculate the objective function at the start point
        if not self.restart:
            self.start_time = time.perf_counter()
            self.obj_func_values = self.fun(self.mean_state)
            self.nfev += 1
            self.optimize_result = ot.get_optimize_result(self)
            ot.save_optimize_results(self.optimize_result)
            if self.logger is not None:
                self.logger.info('       ====== Running optimization - EnOpt ======')
                self.logger.info('\n' + pprint.pformat(self.options))
                info_str = '       {:<10} {:<10} {:<15} {:<15} {:<15} '.format(
                    'iter', 'alpha_iter', 'obj_func', 'step-size', 'cov[0,0]')
                self.logger.info(info_str)
                self.logger.info('       {:<21} {:<15.4e}'.format(
                    self.iteration, np.mean(self.obj_func_values)))

        self.run_loop()

    def set_amax(self, xk, dk):
        '''Return the largest step a such that xk + a*dk stays within the bounds (currently not used).'''
        amax = np.zeros_like(xk)
        for i, xi in enumerate(xk):
            lower, upper = self.bounds[i]
            if np.sign(dk[i]) == 1:
                amax[i] = (upper - xi) / dk[i]
            else:
                amax[i] = (lower - xi) / dk[i]
        return np.min(amax)

    def calc_update(self):

        # Initialize variables for this step
        success = False

        # Define wrapper functions for scipy.optimize.line_search
        def _jac(x):
            self.njev += 1
            x = ot.clip_state(x, self.bounds)  # ensure bounds are respected
            g = self.jac(x, self.cov)
            g = g / la.norm(g, np.inf) if self.normalize else g
            return g

        def _fun(x):
            self.nfev += 1
            x = ot.clip_state(x, self.bounds)  # ensure bounds are respected
            f = self.fun(x, self.cov).mean()
            return f

        # Compute the gradient. If a line search has already been performed,
        # the new gradient was returned as the slope by line_search.
        if self.pk_from_ls is None:
            pk = _jac(self.mean_state)
        else:
            pk = self.pk_from_ls

        # Compute the Hessian
        hessian = self.hess()
        if self.normalize:
            hessian /= np.maximum(la.norm(hessian, np.inf), 1e-12)  # scale the Hessian with its inf-norm

        # Perform the line search
        self.logger.info('Performing line search...')
        ls_results = line_search(f=_fun,
                                 myfprime=_jac,
                                 xk=self.mean_state,
                                 pk=-pk,
                                 gfk=pk,
                                 old_fval=self.obj_func_values.mean(),
                                 c1=self.ls_options['c1'],
                                 c2=self.ls_options['c2'],
                                 amax=self.alpha_max,
                                 maxiter=self.alpha_iter_max)

        step_size, nfev, njev, fnew, fold, slope = ls_results

        if isinstance(step_size, float):
            self.logger.info('Strong Wolfe conditions satisfied')

            # Update the state
            self.mean_state = ot.clip_state(self.mean_state - step_size * pk, self.bounds)
            self.obj_func_values = fnew
            self.alpha = step_size
            self.pk_from_ls = slope

            # Update the covariance
            self.cov = self.cov - self.alpha_cov * hessian
            self.cov = ot.get_sym_pos_semidef(self.cov)

            # Update the status
            success = True
            self.optimize_result = ot.get_optimize_result(self)
            ot.save_optimize_results(self.optimize_result)

            # Write logging info
            if self.logger is not None:
                info_str_iter = '       {:<10} {:<10} {:<15.4e} {:<15.2e} {:<15.2e}'.format(
                    self.iteration, 0, np.mean(self.obj_func_values),
                    self.alpha, self.cov[0, 0])
                self.logger.info(info_str_iter)

            # Update the iteration counter
            self.iteration += 1

        else:
            self.logger.info('Strong Wolfe conditions not satisfied!')

        return success
```
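The callables passed to `LineSearch` are expected to follow the interface visible in the source above: the constructor calls `fun(x)` with a single argument, while `calc_update` calls `fun(x, cov)` and takes the mean of the result; `jac(x, cov)` returns the gradient; and `hess()` takes no arguments and returns the Hessian matrix. A minimal sketch of compatible callables for a hypothetical two-parameter quadratic objective (not part of popt):

```python
import numpy as np

def fun(x, cov=None):
    # Hypothetical objective; cov defaults to None because the constructor
    # calls fun(x) while calc_update calls fun(x, cov). Returns an array so
    # that .mean() works; an ensemble method would return one value per member.
    return np.atleast_1d(np.sum(x**2))

def jac(x, cov):
    # Gradient of the quadratic objective. In EnOpt this would typically be
    # an ensemble-based gradient estimate that uses the covariance cov.
    return 2.0 * x

def hess():
    # Hessian estimate; here simply the exact, constant 2x2 Hessian.
    return 2.0 * np.eye(2)
```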
Class for ensemble optimization algorithms. These are characterized by calculating the sensitivity or gradient from an ensemble of realizations instead of from classical derivatives. Otherwise, the loop works like a classic optimization loop: a state (or control variable) is iterated upon using an algorithm defined in the update_schemes package.
Attributes
- logger (Logger): Print output to screen and log-file
- pickle_restart_file (str): Save name for pickle dump/load
- optimize_result (OptimizeResult): Dictionary with results for the current iteration
- iteration (int): Iteration index
- max_iter (int): Max number of iterations
- restart (bool): Restart flag
- restartsave (bool): Save restart information flag
Methods
- run_loop(): The main optimization loop
- save(): Save restart file
- load(): Load restart file
- calc_update(): Empty dummy function in the base class; actual functionality must be defined by the subclasses (a minimal subclass sketch follows this list)
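Since `calc_update` is the only piece a concrete scheme must supply, a new update scheme follows the pattern below. This is a minimal sketch under the assumption (consistent with `LineSearch` above) that `run_loop` repeatedly calls `calc_update` and that returning `False` signals the loop to stop; `MyScheme` and its contents are hypothetical.

```python
from popt.loop.optimize import Optimize

class MyScheme(Optimize):
    """Hypothetical subclass illustrating the calc_update contract."""

    def __init__(self, fun, x, **options):
        super().__init__(**options)
        self.fun = fun
        self.mean_state = x
        # ... scheme-specific setup ...
        self.run_loop()  # as in LineSearch, the loop is started from __init__

    def calc_update(self):
        # Perform one update of self.mean_state here and report whether
        # the step was accepted; returning False stops the outer loop.
        success = False
        # ... update logic ...
        return success
```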
LineSearch(fun, x, args, jac, hess, bounds=None, **options)
Parameters
- fun (callable): Objective function
- x (ndarray): Initial mean state (control variable)
- args (tuple): Extra arguments; args[0] is used as the initial covariance
- jac (callable): Gradient (sensitivity) function
- hess (callable): Hessian function
- bounds (list of tuples, optional): (lower, upper) bounds for each parameter
- options (dict): Optimization options
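A hypothetical usage sketch, reusing the toy `fun`, `jac`, and `hess` from the interface example above. The keyword names (`alpha_maxiter`, `alpha_max`, `c1`, `c2`, `normalize`) are the ones read in `__init__`; any further options required by the `Optimize`/PETEnsemble base classes (e.g. for logging or restarts) are not shown. Note that constructing the object already runs the optimization, since `__init__` ends with `run_loop()`; the results are then available in `optimize_result`.

```python
import numpy as np

x0 = np.array([1.0, -0.5])            # initial mean state
cov0 = 0.1 * np.eye(2)                # initial covariance, passed as args[0]
bounds = [(-2.0, 2.0), (-2.0, 2.0)]   # (lower, upper) per parameter

ls = LineSearch(fun=fun, x=x0, args=(cov0,), jac=jac, hess=hess,
                bounds=bounds,
                alpha_maxiter=5,      # max line-search iterations
                alpha_max=1.0,        # upper bound on the step size
                c1=1e-4, c2=0.9,      # strong Wolfe parameters
                normalize=True)

print(ls.optimize_result)             # results of the final iteration
```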
def set_amax(self, xk, dk):
Currently not used. For each parameter, computes the step length at which `xk + a*dk` reaches its bound, and returns the smallest of these values, i.e. the largest step size that keeps the whole state within `self.bounds`.
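For illustration, take `xk = [0.5, 0.5]`, `dk = [1.0, -1.0]` and bounds `(0, 1)` for both parameters: the first parameter hits its upper bound at `(1 - 0.5)/1 = 0.5` and the second its lower bound at `(0 - 0.5)/(-1) = 0.5`, so `set_amax` would return `0.5`. A standalone version of the same computation (note that, like the method itself, it assumes every entry of `dk` is nonzero):

```python
import numpy as np

xk = np.array([0.5, 0.5])
dk = np.array([1.0, -1.0])
bounds = [(0.0, 1.0), (0.0, 1.0)]

amax = np.zeros_like(xk)
for i, xi in enumerate(xk):
    lower, upper = bounds[i]
    # step length at which parameter i reaches its bound along dk
    amax[i] = (upper - xi) / dk[i] if dk[i] > 0 else (lower - xi) / dk[i]

print(np.min(amax))  # 0.5: the largest step keeping xk + a*dk inside the bounds
```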
def calc_update(self):
Performs a single line-search update: computes the (ensemble) gradient and Hessian, calls `scipy.optimize.line_search` to find a step size satisfying the strong Wolfe conditions, and, on success, updates the mean state and covariance and saves the results. Returns `True` if the step was accepted, `False` otherwise.
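For reference, with the descent direction $p_k = -g_k$ used here ($g_k$ the, possibly normalized, gradient at $x_k$), a step size $\alpha$ returned by `scipy.optimize.line_search` satisfies the strong Wolfe conditions

$$
f(x_k + \alpha p_k) \le f(x_k) + c_1 \alpha\, g_k^{\top} p_k,
\qquad
\bigl|\nabla f(x_k + \alpha p_k)^{\top} p_k\bigr| \le c_2 \bigl|g_k^{\top} p_k\bigr|,
$$

with $0 < c_1 < c_2 < 1$ (defaults $c_1 = 10^{-4}$ and $c_2 = 0.9$ above). If no such $\alpha$ is found within `alpha_maxiter` iterations, `line_search` returns `None` for the step size and `calc_update` reports failure.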