popt.loop.optimize
# Internal imports
from popt.misc_tools import optim_tools as ot

# External imports
import os
import numpy as np
import logging
import time
import pickle

# Gets or creates a logger that writes both to 'popt.log' and to screen
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)  # set log level
file_handler = logging.FileHandler('popt.log')  # define file handler and set formatter
formatter = logging.Formatter('%(asctime)s : %(levelname)s : %(name)s : %(message)s')
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)  # add file handler to logger
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)


class Optimize:
    """
    Class for ensemble optimization algorithms. These are classified by calculating the sensitivity or gradient using
    an ensemble instead of classical derivatives. The loop is otherwise like a classic optimization loop: a state (or
    control variable) will be iterated upon using an algorithm defined in the update_scheme package.

    Attributes
    ----------
    logger : Logger
        Print output to screen and log-file

    pickle_restart_file : str
        Save name for pickle dump/load

    optimize_result : OptimizeResult
        Dictionary with results for the current iteration

    iteration : int
        Iteration index

    max_iter : int
        Max number of iterations

    restart : bool
        Restart flag

    restartsave : bool
        Save restart information flag

    Methods
    -------
    run_loop()
        The main optimization loop

    save()
        Save restart file

    load()
        Load restart file

    calc_update()
        Empty dummy function, actual functionality must be defined by the subclasses
    """

    def __init__(self, **options):
        """
        Parameters
        ----------
        options : dict
            Optimization options. Recognized keys:
            'maxiter' (int, default 10), 'restart' (bool, default False),
            'restartsave' (bool, default False), and 'epf' (dict, default {})
            holding the external-penalty-function settings (initial penalty
            factor 'r', 'r_factor', 'tol_factor', 'conv_crit', 'max_epf_iter').
        """
        # Set the logger
        self.logger = logger

        # Save name for (potential) pickle dump/load
        self.pickle_restart_file = 'popt_restart_dump'

        # Dictionary with results for the current iteration
        self.optimize_result = None

        # Initial iteration index
        self.iteration = 0

        # Time counter and random-generator state (set/used by run_loop)
        self.start_time = None
        self.rnd = None

        # Max number of iterations (options.get replaces the old misspelled
        # __set__variable helper; same lookup-with-default semantics)
        self.max_iter = options.get('maxiter', 10)

        # Restart flag
        self.restart = options.get('restart', False)

        # Save restart information flag
        self.restartsave = options.get('restartsave', False)

        # Optimize with external penalty function for constraints, provide r_0 as input
        self.epf = options.get('epf', {})
        self.epf_iteration = 0

        # Initialize variables (set in subclasses)
        self.options = None
        self.mean_state = None
        self.obj_func_values = None
        self.fun = None           # objective function
        self.obj_func_tol = None  # objective tolerance limit

        # Initialize number of function and jacobi evaluations
        self.nfev = 0
        self.njev = 0

    def run_loop(self):
        """
        The main optimization loop: repeatedly call ``calc_update`` until it
        reports failure/convergence or ``max_iter`` is reached. When ``epf``
        options are given, this inner loop is wrapped in an outer external-
        penalty-function loop that increases the penalty factor until the
        controls stop changing by more than the convergence criterion.
        """
        # If it is a restart run, we load the self info that exists in the pickle save file.
        if self.restart:
            # Check that the pickle save file exists in the folder. A real
            # exception (not `assert`) so the check survives `python -O`.
            if not os.path.isfile(self.pickle_restart_file):
                raise FileNotFoundError(
                    'The restart file "{0}" does not exist in folder. Cannot restart!'.format(
                        self.pickle_restart_file))

            # Load restart file
            self.load()

            # Set the random generator to be the saved value
            np.random.set_state(self.rnd)

        else:
            # Delete potential restart files to avoid any problems
            if os.path.isfile(self.pickle_restart_file):
                os.remove(self.pickle_restart_file)

            self.iteration += 1

        # Check if external penalty function (epf) for handling constraints should be used
        epf_not_converged = True
        previous_state = None
        if self.epf:
            previous_state = self.mean_state
            # lazy %-style args so formatting only happens if the record is emitted
            logger.info(' -----> EPF-EnOpt: %s, %s (outer iteration, penalty factor)',
                        self.epf_iteration, self.epf["r"])  # print epf info

        while epf_not_converged:  # outer loop using epf

            # Run a while loop until max iterations or convergence is reached
            is_successful = True
            while self.iteration <= self.max_iter and is_successful:

                # Update control variable
                is_successful = self.calc_update()

                # Save restart file (if requested)
                if self.restartsave:
                    self.rnd = np.random.get_state()  # get the current random state
                    self.save()

            # Check if max iterations was reached
            if self.iteration > self.max_iter:
                self.optimize_result['message'] = 'Iterations stopped due to max iterations reached!'
            else:
                self.optimize_result['message'] = 'Convergence was met :)'

            # Logging some info to screen
            logger.info(' Optimization converged in %d iterations ', self.iteration-1)
            logger.info(' Optimization converged with final obj_func = %.4f',
                        np.mean(self.optimize_result['fun']))
            logger.info(' Total number of function evaluations = %d', self.optimize_result['nfev'])
            logger.info(' Total number of jacobi evaluations = %d', self.optimize_result['njev'])
            if self.start_time is not None:
                logger.info(' Total elapsed time = %.2f minutes', (time.perf_counter()-self.start_time)/60)
            logger.info(' ============================================')

            # Test for convergence of outer epf loop
            epf_not_converged = False
            if self.epf:
                if self.epf_iteration > self.epf['max_epf_iter']:
                    logger.info(' -----> EPF-EnOpt: maximum epf iterations reached')  # print epf info
                    break
                # Relative change per control variable; small epsilon guards
                # against division by zero for zero-valued controls
                p = np.abs(previous_state-self.mean_state) / (np.abs(previous_state) + 1.0e-9)
                conv_crit = self.epf['conv_crit']
                if np.any(p > conv_crit):
                    epf_not_converged = True
                    previous_state = self.mean_state
                    self.epf['r'] *= self.epf['r_factor']  # increase penalty factor
                    self.obj_func_tol *= self.epf['tol_factor']  # decrease tolerance
                    self.obj_func_values = self.fun(self.mean_state, **self.epf)
                    self.iteration = 0  # zero while the outer-round results are collected
                    self.epf_iteration += 1
                    optimize_result = ot.get_optimize_result(self)
                    ot.save_optimize_results(optimize_result)
                    self.nfev += 1
                    # BUGFIX: original read `self.iteration = +1` (unary plus);
                    # the intent is to restart the inner loop at iteration 1.
                    self.iteration = 1
                    r = self.epf['r']
                    logger.info(' -----> EPF-EnOpt: %s, %s (outer iteration, penalty factor)',
                                self.epf_iteration, r)  # print epf info
                else:
                    logger.info(' -----> EPF-EnOpt: converged, no variables changed more than %s %%',
                                conv_crit*100)  # print epf info
                    # Evaluate objective once more without the penalty terms
                    final_obj_no_penalty = str(round(float(self.fun(self.mean_state)), 4))
                    logger.info(' -----> EPF-EnOpt: objective value without penalty = %s',
                                final_obj_no_penalty)  # print epf info

    def save(self):
        """
        Use pickle to dump all the information in 'self' to the restart file.
        Can be used, e.g., if some error has occurred.
        """
        # Open save file and dump all info in self
        with open(self.pickle_restart_file, 'wb') as f:
            pickle.dump(self.__dict__, f)

    def load(self):
        """
        Load the pickled restart file and store all its info in 'self'.

        SECURITY NOTE: `pickle.load` can execute arbitrary code from the
        file — only load restart files from trusted sources.
        """
        # Open file and read with pickle
        with open(self.pickle_restart_file, 'rb') as f:
            tmp_load = pickle.load(f)

        # Save in 'self'
        self.__dict__.update(tmp_load)

    def calc_update(self):
        """
        Empty dummy function; actual functionality must be defined by the
        subclasses. Implementations should return True while the update is
        successful and False to stop the inner loop (this base version
        returns None, which also stops the loop).
        """
        pass
logger =
<Logger popt.loop.optimize (DEBUG)>
file_handler =
<FileHandler /home/runner/work/PET/PET/popt.log (NOTSET)>
formatter =
<logging.Formatter object>
console_handler =
<StreamHandler (NOTSET)>
class
Optimize:
24class Optimize: 25 """ 26 Class for ensemble optimization algorithms. These are classified by calculating the sensitivity or gradient using 27 ensemble instead of classical derivatives. The loop is else as a classic optimization loop: a state (or control 28 variable) will be iterated upon using an algorithm defined in the update_scheme package. 29 30 Attributes 31 ---------- 32 logger : Logger 33 Print output to screen and log-file 34 35 pickle_restart_file : str 36 Save name for pickle dump/load 37 38 optimize_result : OptimizeResult 39 Dictionary with results for the current iteration 40 41 iteration : int 42 Iteration index 43 44 max_iter : int 45 Max number of iterations 46 47 restart : bool 48 Restart flag 49 50 restartsave : bool 51 Save restart information flag 52 53 Methods 54 ------- 55 run_loop() 56 The main optimization loop 57 58 save() 59 Save restart file 60 61 load() 62 Load restart file 63 64 calc_update() 65 Empty dummy function, actual functionality must be defined by the subclasses 66 67 """ 68 69 def __init__(self, **options): 70 """ 71 Parameters 72 ---------- 73 options: dict 74 Optimization options 75 """ 76 77 def __set__variable(var_name=None, defalut=None): 78 if var_name in options: 79 return options[var_name] 80 else: 81 return defalut 82 83 # Set the logger 84 self.logger = logger 85 86 # Save name for (potential) pickle dump/load 87 self.pickle_restart_file = 'popt_restart_dump' 88 89 # Dictionary with results for the current iteration 90 self.optimize_result = None 91 92 # Initial iteration index 93 self.iteration = 0 94 95 # Time counter and random generator 96 self.start_time = None 97 self.rnd = None 98 99 # Max number of iterations 100 self.max_iter = __set__variable('maxiter', 10) 101 102 # Restart flag 103 self.restart = __set__variable('restart', False) 104 105 # Save restart information flag 106 self.restartsave = __set__variable('restartsave', False) 107 108 # Optimze with external penalty function for constraints, provide 
r_0 as input 109 self.epf = __set__variable('epf', {}) 110 self.epf_iteration = 0 111 112 # Initialize variables (set in subclasses) 113 self.options = None 114 self.mean_state = None 115 self.obj_func_values = None 116 self.fun = None # objective function 117 self.obj_func_tol = None # objective tolerance limit 118 119 # Initialize number of function and jacobi evaluations 120 self.nfev = 0 121 self.njev = 0 122 123 def run_loop(self): 124 """ 125 This is the main optimization loop. 126 """ 127 128 # If it is a restart run, we load the self info that exists in the pickle save file. 129 if self.restart: 130 131 # Check if the pickle save file exists in folder 132 assert (self.pickle_restart_file in [f for f in os.listdir('.') if os.path.isfile(f)]), \ 133 'The restart file "{0}" does not exist in folder. Cannot restart!'.format(self.pickle_restart_file) 134 135 # Load restart file 136 self.load() 137 138 # Set the random generator to be the saved value 139 np.random.set_state(self.rnd) 140 141 else: 142 143 # delete potential restart files to avoid any problems 144 if self.pickle_restart_file in [f for f in os.listdir('.') if os.path.isfile(f)]: 145 os.remove(self.pickle_restart_file) 146 147 self.iteration += 1 148 149 # Check if external penalty function (epf) for handling constraints should be used 150 epf_not_converged = True 151 previous_state = None 152 if self.epf: 153 previous_state = self.mean_state 154 logger.info(f' -----> EPF-EnOpt: {self.epf_iteration}, {self.epf["r"]} (outer iteration, penalty factor)') # print epf info 155 156 while epf_not_converged: # outer loop using epf 157 158 # Run a while loop until max iterations or convergence is reached 159 is_successful = True 160 while self.iteration <= self.max_iter and is_successful: 161 162 # Update control variable 163 is_successful = self.calc_update() 164 165 # Save restart file (if requested) 166 if self.restartsave: 167 self.rnd = np.random.get_state() # get the current random state 168 
self.save() 169 170 # Check if max iterations was reached 171 if self.iteration > self.max_iter: 172 self.optimize_result['message'] = 'Iterations stopped due to max iterations reached!' 173 else: 174 self.optimize_result['message'] = 'Convergence was met :)' 175 176 # Logging some info to screen 177 logger.info(' Optimization converged in %d iterations ', self.iteration-1) 178 logger.info(' Optimization converged with final obj_func = %.4f', 179 np.mean(self.optimize_result['fun'])) 180 logger.info(' Total number of function evaluations = %d', self.optimize_result['nfev']) 181 logger.info(' Total number of jacobi evaluations = %d', self.optimize_result['njev']) 182 if self.start_time is not None: 183 logger.info(' Total elapsed time = %.2f minutes', (time.perf_counter()-self.start_time)/60) 184 logger.info(' ============================================') 185 186 # Test for convergence of outer epf loop 187 epf_not_converged = False 188 if self.epf: 189 if self.epf_iteration > self.epf['max_epf_iter']: # max epf_iterations set to 10 190 logger.info(f' -----> EPF-EnOpt: maximum epf iterations reached') # print epf info 191 break 192 p = np.abs(previous_state-self.mean_state) / (np.abs(previous_state) + 1.0e-9) 193 conv_crit = self.epf['conv_crit'] 194 if np.any(p > conv_crit): 195 epf_not_converged = True 196 previous_state = self.mean_state 197 self.epf['r'] *= self.epf['r_factor'] # increase penalty factor 198 self.obj_func_tol *= self.epf['tol_factor'] # decrease tolerance 199 self.obj_func_values = self.fun(self.mean_state, **self.epf) 200 self.iteration = 0 201 self.epf_iteration += 1 202 optimize_result = ot.get_optimize_result(self) 203 ot.save_optimize_results(optimize_result) 204 self.nfev += 1 205 self.iteration = +1 206 r = self.epf['r'] 207 logger.info(f' -----> EPF-EnOpt: {self.epf_iteration}, {r} (outer iteration, penalty factor)') # print epf info 208 else: 209 logger.info(f' -----> EPF-EnOpt: converged, no variables changed more than {conv_crit*100} 
%') # print epf info 210 final_obj_no_penalty = str(round(float(self.fun(self.mean_state)),4)) 211 logger.info(f' -----> EPF-EnOpt: objective value without penalty = {final_obj_no_penalty}') # print epf info 212 213 def save(self): 214 """ 215 We use pickle to dump all the information we have in 'self'. Can be used, e.g., if some error has occurred. 216 """ 217 # Open save file and dump all info. in self 218 with open(self.pickle_restart_file, 'wb') as f: 219 pickle.dump(self.__dict__, f) 220 221 def load(self): 222 """ 223 Load a pickled file and save all info. in self. 224 """ 225 # Open file and read with pickle 226 with open(self.pickle_restart_file, 'rb') as f: 227 tmp_load = pickle.load(f) 228 229 # Save in 'self' 230 self.__dict__.update(tmp_load) 231 232 def calc_update(self): 233 """ 234 This is an empty dummy function. Actual functionality must be defined by the subclasses. 235 """ 236 pass
Class for ensemble optimization algorithms. These are classified by calculating the sensitivity or gradient using an ensemble instead of classical derivatives. The loop is otherwise like a classic optimization loop: a state (or control variable) will be iterated upon using an algorithm defined in the update_scheme package.
Attributes
- logger (Logger): Print output to screen and log-file
- pickle_restart_file (str): Save name for pickle dump/load
- optimize_result (OptimizeResult): Dictionary with results for the current iteration
- iteration (int): Iteration index
- max_iter (int): Max number of iterations
- restart (bool): Restart flag
- restartsave (bool): Save restart information flag
Methods
run_loop() The main optimization loop
save() Save restart file
load() Load restart file
calc_update() Empty dummy function, actual functionality must be defined by the subclasses
Optimize(**options)
69 def __init__(self, **options): 70 """ 71 Parameters 72 ---------- 73 options: dict 74 Optimization options 75 """ 76 77 def __set__variable(var_name=None, defalut=None): 78 if var_name in options: 79 return options[var_name] 80 else: 81 return defalut 82 83 # Set the logger 84 self.logger = logger 85 86 # Save name for (potential) pickle dump/load 87 self.pickle_restart_file = 'popt_restart_dump' 88 89 # Dictionary with results for the current iteration 90 self.optimize_result = None 91 92 # Initial iteration index 93 self.iteration = 0 94 95 # Time counter and random generator 96 self.start_time = None 97 self.rnd = None 98 99 # Max number of iterations 100 self.max_iter = __set__variable('maxiter', 10) 101 102 # Restart flag 103 self.restart = __set__variable('restart', False) 104 105 # Save restart information flag 106 self.restartsave = __set__variable('restartsave', False) 107 108 # Optimze with external penalty function for constraints, provide r_0 as input 109 self.epf = __set__variable('epf', {}) 110 self.epf_iteration = 0 111 112 # Initialize variables (set in subclasses) 113 self.options = None 114 self.mean_state = None 115 self.obj_func_values = None 116 self.fun = None # objective function 117 self.obj_func_tol = None # objective tolerance limit 118 119 # Initialize number of function and jacobi evaluations 120 self.nfev = 0 121 self.njev = 0
Parameters
- options (dict): Optimization options
def
run_loop(self):
123 def run_loop(self): 124 """ 125 This is the main optimization loop. 126 """ 127 128 # If it is a restart run, we load the self info that exists in the pickle save file. 129 if self.restart: 130 131 # Check if the pickle save file exists in folder 132 assert (self.pickle_restart_file in [f for f in os.listdir('.') if os.path.isfile(f)]), \ 133 'The restart file "{0}" does not exist in folder. Cannot restart!'.format(self.pickle_restart_file) 134 135 # Load restart file 136 self.load() 137 138 # Set the random generator to be the saved value 139 np.random.set_state(self.rnd) 140 141 else: 142 143 # delete potential restart files to avoid any problems 144 if self.pickle_restart_file in [f for f in os.listdir('.') if os.path.isfile(f)]: 145 os.remove(self.pickle_restart_file) 146 147 self.iteration += 1 148 149 # Check if external penalty function (epf) for handling constraints should be used 150 epf_not_converged = True 151 previous_state = None 152 if self.epf: 153 previous_state = self.mean_state 154 logger.info(f' -----> EPF-EnOpt: {self.epf_iteration}, {self.epf["r"]} (outer iteration, penalty factor)') # print epf info 155 156 while epf_not_converged: # outer loop using epf 157 158 # Run a while loop until max iterations or convergence is reached 159 is_successful = True 160 while self.iteration <= self.max_iter and is_successful: 161 162 # Update control variable 163 is_successful = self.calc_update() 164 165 # Save restart file (if requested) 166 if self.restartsave: 167 self.rnd = np.random.get_state() # get the current random state 168 self.save() 169 170 # Check if max iterations was reached 171 if self.iteration > self.max_iter: 172 self.optimize_result['message'] = 'Iterations stopped due to max iterations reached!' 
173 else: 174 self.optimize_result['message'] = 'Convergence was met :)' 175 176 # Logging some info to screen 177 logger.info(' Optimization converged in %d iterations ', self.iteration-1) 178 logger.info(' Optimization converged with final obj_func = %.4f', 179 np.mean(self.optimize_result['fun'])) 180 logger.info(' Total number of function evaluations = %d', self.optimize_result['nfev']) 181 logger.info(' Total number of jacobi evaluations = %d', self.optimize_result['njev']) 182 if self.start_time is not None: 183 logger.info(' Total elapsed time = %.2f minutes', (time.perf_counter()-self.start_time)/60) 184 logger.info(' ============================================') 185 186 # Test for convergence of outer epf loop 187 epf_not_converged = False 188 if self.epf: 189 if self.epf_iteration > self.epf['max_epf_iter']: # max epf_iterations set to 10 190 logger.info(f' -----> EPF-EnOpt: maximum epf iterations reached') # print epf info 191 break 192 p = np.abs(previous_state-self.mean_state) / (np.abs(previous_state) + 1.0e-9) 193 conv_crit = self.epf['conv_crit'] 194 if np.any(p > conv_crit): 195 epf_not_converged = True 196 previous_state = self.mean_state 197 self.epf['r'] *= self.epf['r_factor'] # increase penalty factor 198 self.obj_func_tol *= self.epf['tol_factor'] # decrease tolerance 199 self.obj_func_values = self.fun(self.mean_state, **self.epf) 200 self.iteration = 0 201 self.epf_iteration += 1 202 optimize_result = ot.get_optimize_result(self) 203 ot.save_optimize_results(optimize_result) 204 self.nfev += 1 205 self.iteration = +1 206 r = self.epf['r'] 207 logger.info(f' -----> EPF-EnOpt: {self.epf_iteration}, {r} (outer iteration, penalty factor)') # print epf info 208 else: 209 logger.info(f' -----> EPF-EnOpt: converged, no variables changed more than {conv_crit*100} %') # print epf info 210 final_obj_no_penalty = str(round(float(self.fun(self.mean_state)),4)) 211 logger.info(f' -----> EPF-EnOpt: objective value without penalty = 
{final_obj_no_penalty}') # print epf info
This is the main optimization loop.
def
save(self):
213 def save(self): 214 """ 215 We use pickle to dump all the information we have in 'self'. Can be used, e.g., if some error has occurred. 216 """ 217 # Open save file and dump all info. in self 218 with open(self.pickle_restart_file, 'wb') as f: 219 pickle.dump(self.__dict__, f)
We use pickle to dump all the information we have in 'self'. Can be used, e.g., if some error has occurred.
def
load(self):
221 def load(self): 222 """ 223 Load a pickled file and save all info. in self. 224 """ 225 # Open file and read with pickle 226 with open(self.pickle_restart_file, 'rb') as f: 227 tmp_load = pickle.load(f) 228 229 # Save in 'self' 230 self.__dict__.update(tmp_load)
Load a pickled file and save all info. in self.