Optimizer and Evolution Source Code


Evolution classes

optim.evolution.Evolution

Bases: ABC

This class implements an abstract evolution object.

Source code in aero_optim/optim/evolution.py
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
class Evolution(ABC):
    """
    This class implements an abstract evolution object.

    Template: `__init__` wires the three abstract hooks in a fixed order —
    `set_optimizer` (choose the optimizer class), instantiation of that class,
    then `set_ea` (build the evolutionary algorithm from the optimizer).
    """
    def __init__(self, config: dict, debug: bool):
        # optional path to a module providing Custom* classes (empty string if unset)
        self.custom_file: str = config["study"].get("custom_file", "")
        self.set_optimizer(debug=debug)
        # OptimizerClass is a class object set by set_optimizer; the attribute
        # below holds an *instance* of it, hence the annotation Optimizer
        # (the original Type[Optimizer] annotation described the class, not the instance)
        self.optimizer: Optimizer = self.OptimizerClass(config)
        self.set_ea()

    @abstractmethod
    def set_optimizer(self, *args, **kwargs):
        """
        Sets the optimizer object.

        The base implementation only resolves a custom optimizer class;
        it leaves OptimizerClass as None when no custom_file is configured,
        so subclasses are expected to fill in a default class.
        """
        self.OptimizerClass = (
            get_custom_class(self.custom_file, "CustomOptimizer") if self.custom_file else None
        )

    @abstractmethod
    def set_ea(self, *args, **kwargs):
        """
        Sets the evolutionary computation algorithm.
        """

    @abstractmethod
    def evolve(self, *args, **kwargs):
        """
        Defines how to execute the optimization.
        """

evolve(*args, **kwargs) abstractmethod

Defines how to execute the optimization.

Source code in aero_optim/optim/evolution.py
49
50
51
52
53
@abstractmethod
def evolve(self, *args, **kwargs):
    """
    Defines how to execute the optimization.

    Concrete subclasses run the evolutionary loop here and log final results.
    """

set_ea(*args, **kwargs) abstractmethod

Sets the evolutionary computation algorithm.

Source code in aero_optim/optim/evolution.py
43
44
45
46
47
@abstractmethod
def set_ea(self, *args, **kwargs):
    """
    Sets the evolutionary computation algorithm.

    Concrete subclasses build the `algorithm` attribute here.
    """

set_optimizer(*args, **kwargs) abstractmethod

Sets the optimizer object.

Source code in aero_optim/optim/evolution.py
34
35
36
37
38
39
40
41
@abstractmethod
def set_optimizer(self, *args, **kwargs):
    """
    Sets the optimizer object.
    """
    # resolves a custom optimizer class if one is configured; otherwise leaves
    # OptimizerClass as None so subclasses can substitute a default class
    self.OptimizerClass = (
        get_custom_class(self.custom_file, "CustomOptimizer") if self.custom_file else None
    )

inspyred Evolution

optim.evolution.InspyredEvolution

Bases: Evolution

This class implements a default inspyred based evolution object.

Source code in aero_optim/optim/evolution.py
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
class InspyredEvolution(Evolution):
    """
    This class implements a default inspyred based evolution object.
    """
    def __init__(self, config: dict, debug: bool = False):
        super().__init__(config, debug)
        # self.algorithm was built by set_ea (called from the base __init__);
        # plug in the optimizer's observer and stop after max_generations
        self.algorithm.observer = self.optimizer._observe
        self.algorithm.terminator = terminators.generation_termination

    def set_optimizer(self, debug: bool = False):
        """
        **Instantiates** the optimizer attribute as custom if any or from default classes.
        """
        super().set_optimizer()
        # no custom optimizer resolved: fall back to the default inspyred classes
        if not self.OptimizerClass:
            if debug:
                self.OptimizerClass = InspyredDebugOptimizer
            else:
                self.OptimizerClass = InspyredWolfOptimizer
            logger.info(f"optimizer set to {self.OptimizerClass}")

    def set_ea(self):
        """
        **Instantiates** the default algorithm attribute.
        """
        self.algorithm = inspyred_select_strategy(self.optimizer.strategy, self.optimizer.prng)

    def evolve(self):
        """
        **Executes** the default evolution method.
        """
        final_pop = self.algorithm.evolve(generator=self.optimizer.generator._ins_generator,
                                          evaluator=self.optimizer._evaluate,
                                          pop_size=self.optimizer.doe_size,
                                          max_generations=self.optimizer.max_generations,
                                          bounder=Bounder(*self.optimizer.bound),
                                          maximize=self.optimizer.maximize,
                                          **self.optimizer.ea_kwargs)

        self.optimizer.final_observe()

        # output results
        # NOTE(review): max(final_pop) relies on inspyred's Individual ordering —
        # confirm it yields the best candidate for minimization runs as well
        best = max(final_pop)
        # best fitness over all evaluated candidates (max or min depending on the goal)
        index, opt_J = (
            max(enumerate(self.optimizer.J), key=ope.itemgetter(1)) if self.optimizer.maximize else
            min(enumerate(self.optimizer.J), key=ope.itemgetter(1))
        )
        # map the flat candidate index back to (generation id, candidate id)
        gid, cid = (index // self.optimizer.doe_size, index % self.optimizer.doe_size)
        logger.info(f"optimal J: {opt_J} (J_ins: {best.fitness}),\n"
                    f"D: {' '.join([str(d) for d in self.optimizer.inputs[gid][cid]])}\n"
                    f"D_ins: {' '.join([str(d) for d in best.candidate[:self.optimizer.n_design]])}"
                    f"\n[g{gid}, c{cid}]")

evolve()

Executes the default evolution method.

Source code in aero_optim/optim/evolution.py
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
def evolve(self):
    """
    **Executes** the default evolution method.
    """
    final_pop = self.algorithm.evolve(generator=self.optimizer.generator._ins_generator,
                                      evaluator=self.optimizer._evaluate,
                                      pop_size=self.optimizer.doe_size,
                                      max_generations=self.optimizer.max_generations,
                                      bounder=Bounder(*self.optimizer.bound),
                                      maximize=self.optimizer.maximize,
                                      **self.optimizer.ea_kwargs)

    self.optimizer.final_observe()

    # output results
    # NOTE(review): max(final_pop) relies on inspyred's Individual ordering —
    # confirm it yields the best candidate for minimization runs as well
    best = max(final_pop)
    # best fitness over all evaluated candidates (max or min depending on the goal)
    index, opt_J = (
        max(enumerate(self.optimizer.J), key=ope.itemgetter(1)) if self.optimizer.maximize else
        min(enumerate(self.optimizer.J), key=ope.itemgetter(1))
    )
    # map the flat candidate index back to (generation id, candidate id)
    gid, cid = (index // self.optimizer.doe_size, index % self.optimizer.doe_size)
    logger.info(f"optimal J: {opt_J} (J_ins: {best.fitness}),\n"
                f"D: {' '.join([str(d) for d in self.optimizer.inputs[gid][cid]])}\n"
                f"D_ins: {' '.join([str(d) for d in best.candidate[:self.optimizer.n_design]])}"
                f"\n[g{gid}, c{cid}]")

set_ea()

Instantiates the default algorithm attribute.

Source code in aero_optim/optim/evolution.py
129
130
131
132
133
def set_ea(self):
    """
    **Instantiates** the default algorithm attribute.
    """
    # picks the inspyred algorithm matching the configured strategy (e.g. ES, PSO)
    self.algorithm = inspyred_select_strategy(self.optimizer.strategy, self.optimizer.prng)

set_optimizer(debug: bool = False)

Instantiates the optimizer attribute as custom if any or from default classes.

Source code in aero_optim/optim/evolution.py
117
118
119
120
121
122
123
124
125
126
127
def set_optimizer(self, debug: bool = False):
    """
    **Instantiates** the optimizer attribute as custom if any or from default classes.
    """
    super().set_optimizer()
    # no custom optimizer resolved: fall back to the default inspyred classes
    if not self.OptimizerClass:
        if debug:
            self.OptimizerClass = InspyredDebugOptimizer
        else:
            self.OptimizerClass = InspyredWolfOptimizer
        logger.info(f"optimizer set to {self.OptimizerClass}")

pymoo Evolution

optim.evolution.PymooEvolution

Bases: Evolution

This class implements a default pymoo based evolution object.

Source code in aero_optim/optim/evolution.py
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
class PymooEvolution(Evolution):
    """
    This class implements a default pymoo based evolution object.
    """
    def __init__(self, config: dict, debug: bool = False):
        super().__init__(config, debug)

    def set_optimizer(self, debug: bool = False):
        """
        **Instantiates** the optimizer attribute as custom if any or from default classes.
        """
        super().set_optimizer()
        # no custom optimizer resolved: fall back to the default pymoo classes
        if not self.OptimizerClass:
            if debug:
                self.OptimizerClass = PymooDebugOptimizer
            else:
                self.OptimizerClass = PymooWolfOptimizer
            logger.info(f"optimizer set to {self.OptimizerClass}")

    def set_ea(self):
        """
        **Instantiates** the default algorithm attribute.
        """
        self.algorithm = pymoo_select_strategy(
            self.optimizer.strategy,
            self.optimizer.doe_size,
            self.optimizer.generator._pymoo_generator(),
            self.optimizer.ea_kwargs
        )

    def evolve(self):
        """
        **Executes** the default evolution method.
        """
        res = minimize(problem=self.optimizer,
                       algorithm=self.algorithm,
                       termination=get_termination("n_gen", self.optimizer.max_generations),
                       seed=self.optimizer.seed,
                       verbose=True)

        self.optimizer.final_observe()

        # output results
        best = res.F
        # retrieve the evaluated candidate whose fitness is closest to pymoo's optimum
        index, opt_J = min(enumerate(self.optimizer.J), key=lambda x: abs(best - x[1]))
        # map the flat candidate index back to (generation id, candidate id)
        gid, cid = (index // self.optimizer.doe_size, index % self.optimizer.doe_size)
        logger.info(f"optimal J: {opt_J} (J_pymoo: {best}),\n"
                    f"D: {' '.join([str(d) for d in self.optimizer.inputs[gid][cid]])}\n"
                    f"D_pymoo: {' '.join([str(d) for d in res.X])}\n"
                    f"[g{gid}, c{cid}]")

evolve()

Executes the default evolution method.

Source code in aero_optim/optim/evolution.py
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
def evolve(self):
    """
    **Executes** the default evolution method.
    """
    res = minimize(problem=self.optimizer,
                   algorithm=self.algorithm,
                   termination=get_termination("n_gen", self.optimizer.max_generations),
                   seed=self.optimizer.seed,
                   verbose=True)

    self.optimizer.final_observe()

    # output results
    best = res.F
    # retrieve the evaluated candidate whose fitness is closest to pymoo's optimum
    index, opt_J = min(enumerate(self.optimizer.J), key=lambda x: abs(best - x[1]))
    # map the flat candidate index back to (generation id, candidate id)
    gid, cid = (index // self.optimizer.doe_size, index % self.optimizer.doe_size)
    logger.info(f"optimal J: {opt_J} (J_pymoo: {best}),\n"
                f"D: {' '.join([str(d) for d in self.optimizer.inputs[gid][cid]])}\n"
                f"D_pymoo: {' '.join([str(d) for d in res.X])}\n"
                f"[g{gid}, c{cid}]")

set_ea()

Instantiates the default algorithm attribute.

Source code in aero_optim/optim/evolution.py
75
76
77
78
79
80
81
82
83
84
def set_ea(self):
    """
    **Instantiates** the default algorithm attribute.
    """
    # picks the pymoo algorithm matching the configured strategy (e.g. GA, PSO)
    self.algorithm = pymoo_select_strategy(
        self.optimizer.strategy,
        self.optimizer.doe_size,
        self.optimizer.generator._pymoo_generator(),
        self.optimizer.ea_kwargs
    )

set_optimizer(debug: bool = False)

Instantiates the optimizer attribute as custom if any or from default classes.

Source code in aero_optim/optim/evolution.py
63
64
65
66
67
68
69
70
71
72
73
def set_optimizer(self, debug: bool = False):
    """
    **Instantiates** the optimizer attribute as custom if any or from default classes.
    """
    super().set_optimizer()
    # no custom optimizer resolved: fall back to the default pymoo classes
    if not self.OptimizerClass:
        if debug:
            self.OptimizerClass = PymooDebugOptimizer
        else:
            self.OptimizerClass = PymooWolfOptimizer
        logger.info(f"optimizer set to {self.OptimizerClass}")

Optimizer classes

optim.optimizer.Optimizer

Bases: ABC

This class implements an abstract optimizer.

Source code in aero_optim/optim/optimizer.py
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
class Optimizer(ABC):
    """
    This class implements an abstract optimizer.
    """
    def __init__(self, config: dict, debug: bool = False):
        """
        Instantiates the Optimizer object.

        **Input**

        - config (dict): the config file dictionary.
        - debug (bool): skip FFD and Mesh objects instantiation for debugging purposes.

        **Inner**

        - n_design (int): the number of design variables (dimensions of the problem).
        - doe_size (int): the size of the initial and subsequent generations.
        - max_generations (int): the number of generations before termination.
        - dat_file (str): path to input_geometry.dat (baseline geometry).
        - outdir (str): highest level optimization output directory.

        Note:
            the result folder tree is structured as follows:
            ```
            outdir
            |__ FFD (contains <geom>_gXX_cYY.dat)
            |__ Figs (contains the figures generated during the optimization)
            |__ MESH (contains <geom>_gXX_cYY.mesh, .log, .geo_unrolled)
            |__ SOLVER
                |__ solver_gXX_cYY (contains the results of each simulation)
            ```

        - study_type (str): use-case/meshing routine.
        - ffd_type (str): deformation method.
        - strategy (str): the optimization algorithm amongst inspyred's [ES, PSO]
            and pymoo's [GA, PSO]</br>
            see https://pythonhosted.org/inspyred/examples.html#standard-algorithms</br>
            and https://pymoo.org/algorithms/list.html#nb-algorithms-list

        - maximize (bool): whether to maximize or minimize the objective QoIs.
        - budget (int): maximum number of concurrent proc in use.
        - nproc_per_sim (int): number of proc per simulation.
        - bound (tuple[float]): design variables boundaries.
        - custom_doe (str): path to a custom doe.
        - sampler_name (str): name of the sampling algorithm used to generate samples
          for the initial generation.
        - seed (int): seed number of the random processes involved in the optimization.
        - prng (random.Random): pseudo-random generator passed to inspyred generator.
        - ea_kwargs (dict): additional arguments to be passed to the evolution algorithm.
        - gen_ctr (int): generation counter.
        - generator (Generator): Generator object for the initial generation sampling.
        - ffd (FFD_2D): FFD_2D object to generate deformed geometries.
        - gmsh_mesh (Mesh): Mesh class to generate deformed geometries meshes.
        - simulator (Simulator): Simulator object to perform simulations.
        - mean (list[float]): list of populations mean fitness.
        - median (list[float]): list of populations median fitness.
        - max (list[float]): list of populations max fitness.
        - min (list[float]): list of populations min fitness.
        - J (list[float | list[float]]): the list of all generated candidates fitnesses.
        - inputs (list[list[np.ndarray]]): all input candidates.
        - ffd_profiles (list[list[np.ndarray]]): all deformed geometries {gid: {cid: ffd_profile}}.
        - QoI (str): the quantity of interest to minimize/maximize.
        - n_plt (int): the number of best candidates results to display after each evaluation.
        - cmap (str): the colormaps used for the observer plot</br>
            see https://matplotlib.org/stable/users/explain/colors/colormaps.html.
        """
        self.config = config
        self.process_config()
        # required entries
        self.n_design: int = config["optim"]["n_design"]
        self.doe_size: int = config["optim"]["doe_size"]
        self.max_generations: int = config["optim"]["max_generations"]
        self.dat_file: str = config["study"]["file"]
        self.outdir: str = config["study"]["outdir"]
        self.study_type: str = config["study"]["study_type"]
        # optional entries
        self.ffd_type: str = config["study"].get("ffd_type", "")
        self.custom_file: str = config["study"].get("custom_file", "")
        self.strategy: str = config["optim"].get("strategy", "PSO")
        self.maximize: bool = config["optim"].get("maximize", False)
        self.budget: int = config["optim"].get("budget", 4)
        self.nproc_per_sim: int = config["optim"].get("nproc_per_sim", 1)
        self.bound: tuple[Any, ...] = tuple(config["optim"].get("bound", [-1, 1]))
        self.custom_doe: str = config["optim"].get("custom_doe", "")
        self.sampler_name: str = config["optim"].get("sampler_name", "lhs")
        self.ea_kwargs: dict = config["optim"].get("ea_kwargs", {})
        # reproducibility variables
        self.seed: int = config["optim"].get("seed", 123)
        self.prng: Random = Random()
        self.prng.seed(self.seed)
        # generation counter
        self.gen_ctr: int = 0
        # optimization objects
        # debug mode skips FFD and mesh instantiation (no geometry generation)
        if not debug:
            self.set_ffd_class()
            self.set_gmsh_mesh_class()
        self.generator: Generator = Generator(
            self.seed, self.n_design, self.doe_size, self.sampler_name, self.bound, self.custom_doe
        )
        self.set_simulator_class()
        self.simulator = self.SimulatorClass(self.config)
        # population statistics
        self.mean: list[float] = []
        self.median: list[float] = []
        self.max: list[float] = []
        self.min: list[float] = []
        # set other inner optimization variables
        self.J: list[float | list[float]] = []
        self.inputs: list[list[np.ndarray]] = []
        self.ffd_profiles: list[list[np.ndarray]] = []
        self.QoI: str = self.config["optim"].get("QoI", "CD")
        self.n_plt: int = self.config["optim"].get("n_plt", 5)
        self.cmap: str = self.config["optim"].get("cmap", "viridis")
        self.set_inner()
        # figure directory
        self.figdir: str = os.path.join(self.outdir, "Figs")
        check_dir(self.figdir)

    def process_config(self):
        """
        **Makes sure** the config file contains the required information.

        Missing required entries raise; missing optional entries only warn
        (defaults are applied later in __init__).
        """
        logger.info("processing config..")
        if "n_design" not in self.config["optim"]:
            raise Exception(f"ERROR -- no <n_design> entry in {self.config['optim']}")
        if "doe_size" not in self.config["optim"]:
            raise Exception(f"ERROR -- no <doe_size> entry in {self.config['optim']}")
        if "max_generations" not in self.config["optim"]:
            raise Exception(f"ERROR -- no <max_generations> entry in {self.config['optim']}")
        if "file" not in self.config["study"]:
            raise Exception(f"ERROR -- no <file> entry in {self.config['study']}")
        if "budget" not in self.config["optim"]:
            logger.warning(f"no <budget> entry in {self.config['optim']}")
        if "nproc_per_sim" not in self.config["optim"]:
            logger.warning(f"no <nproc_per_sim> entry in {self.config['optim']}")
        if "bound" not in self.config["optim"]:
            logger.warning(f"no <bound> entry in {self.config['optim']}")
        if "sampler_name" not in self.config["optim"]:
            logger.warning(f"no <sampler_name> entry in {self.config['optim']}")
        if "seed" not in self.config["optim"]:
            logger.warning(f"no <seed> entry in {self.config['optim']}")
        #  alter config for optimization purposes
        if "outfile" in self.config["study"]:
            logger.warning(f"<outfile> entry in {self.config['study']} will be ignored")
            del self.config["study"]["outfile"]
        if "view" in self.config["gmsh"] and "GUI" in self.config["gmsh"]["view"]:
            logger.warning(
                f"<GUI> entry in {self.config['gmsh']['view']} forced to False"
            )
            self.config["gmsh"]["view"]["GUI"] = False

    def set_ffd_class(self):
        """
        **Instantiates** the deformation class and object as custom if found,
        as one of the default classes otherwise.
        """
        self.FFDClass = (
            get_custom_class(self.custom_file, "CustomFFD") if self.custom_file else None
        )
        if not self.FFDClass:
            if self.ffd_type == FFD_TYPE[0]:
                self.FFDClass = FFD_2D
                self.ffd = self.FFDClass(self.dat_file, self.n_design // 2)
            elif self.ffd_type == FFD_TYPE[1]:
                # POD-based FFD: the design space is switched to the POD
                # control variables and bounds below
                self.FFDClass = FFD_POD_2D
                self.config["ffd"]["ffd_ncontrol"] = self.n_design
                self.config["ffd"]["ffd_bound"] = self.bound
                logger.info(f"ffd bound: {self.bound}")
                self.ffd = self.FFDClass(self.dat_file, **self.config["ffd"])
                self.n_design = self.config["ffd"]["pod_ncontrol"]
                self.bound = self.config["ffd"].get("pod_bound", self.ffd.get_bound())
                logger.info(f"pod bound: {self.bound}")
            else:
                raise Exception(f"ERROR -- incorrect ffd_type <{self.ffd_type}>")
        else:
            self.ffd = self.FFDClass(self.dat_file, self.n_design, **self.config["ffd"])

    def set_gmsh_mesh_class(self):
        """
        **Instantiates** the mesher class as custom if found,
        as one of the default meshers otherwise.
        """
        self.MeshClass = (
            get_custom_class(self.custom_file, "CustomMesh") if self.custom_file else None
        )
        if not self.MeshClass:
            if self.study_type == STUDY_TYPE[0]:
                self.MeshClass = NACABaseMesh
            elif self.study_type == STUDY_TYPE[1]:
                self.MeshClass = NACABlockMesh
            elif self.study_type == STUDY_TYPE[2]:
                self.MeshClass = CascadeMesh
            else:
                raise Exception(f"ERROR -- incorrect study_type <{self.study_type}>")

    def set_inner(self):
        """
        **Sets** some use-case specific inner variables:
        """
        # default no-op; subclasses may override
        logger.info("set_inner not implemented")

    def deform(self, Delta: np.ndarray, gid: int, cid: int) -> tuple[str, np.ndarray]:
        """
        **Applies** FFD on a given candidate and returns its resulting file.
        """
        ffd_dir = os.path.join(self.outdir, "FFD")
        check_dir(ffd_dir)
        logger.info(f"g{gid}, c{cid} generate profile with deformation {Delta}")
        profile: np.ndarray = self.ffd.apply_ffd(Delta)
        return self.ffd.write_ffd(profile, Delta, ffd_dir, gid=gid, cid=cid), profile

    def mesh(self, ffdfile: str) -> str:
        """
        **Builds** mesh for a given candidate and returns its resulting file.

        Note:
            if a mesh file matching the pattern name already exists, it is not rebuilt.
        """
        mesh_dir = os.path.join(self.outdir, "MESH")
        check_dir(mesh_dir)
        gmsh_mesh = self.MeshClass(self.config, ffdfile)
        # reuse an existing mesh file rather than rebuilding it
        if os.path.isfile(gmsh_mesh.get_meshfile(mesh_dir)):
            return gmsh_mesh.get_meshfile(mesh_dir)
        gmsh_mesh.build_mesh()
        return gmsh_mesh.write_mesh(mesh_dir)

    def execute_candidates(self, candidates: list[Individual] | np.ndarray, gid: int):
        """
        **Executes** all candidates and **waits** for them to finish.

        Note:
            this method is meant to be called in _evaluate.
        """
        logger.info(f"evaluating candidates of generation {self.gen_ctr}..")
        self.ffd_profiles.append([])
        self.inputs.append([])
        for cid, cand in enumerate(candidates):
            self.inputs[gid].append(np.array(cand))
            ffd_file, ffd_profile = self.deform(cand, gid, cid)
            self.ffd_profiles[gid].append(ffd_profile)
            # meshing with proper sigint management
            # see https://gitlab.onelab.info/gmsh/gmsh/-/issues/842
            ORIGINAL_SIGINT_HANDLER = signal.signal(signal.SIGINT, signal.SIG_DFL)
            mesh_file = self.mesh(ffd_file)
            signal.signal(signal.SIGINT, ORIGINAL_SIGINT_HANDLER)
            # throttle submissions: wait while launching another simulation
            # would exceed the processor budget
            while self.simulator.monitor_sim_progress() * self.nproc_per_sim >= self.budget:
                time.sleep(1)
            self.simulator.execute_sim(meshfile=mesh_file, gid=gid, cid=cid)

        # wait for last candidates to finish
        while self.simulator.monitor_sim_progress() > 0:
            time.sleep(0.1)

    def compute_statistics(self, gen_fitness: np.ndarray):
        """
        **Computes** generation statistics.

        Note:
            this method is meant to be called in `_observe`.
        """
        self.mean.append(np.mean(gen_fitness))
        self.median.append(np.median(gen_fitness))
        self.min.append(min(gen_fitness))
        self.max.append(max(gen_fitness))

    def _observe(self, *args, **kwargs):
        """
        **Plots** generation data after each evaluation.
        """
        # default no-op; subclasses may override
        logger.info("_observe not implemented")

    def plot_generation(
            self,
            gid: int,
            sorted_idx: np.ndarray,
            gen_fitness: np.ndarray,
            fig_name: str
    ):
        """
        **Plots** the results of the last evaluated generation.
        **Saves** the graph in the output directory.

        Note:
            this method is meant to be called in `_observe`.
        """
        # default no-op; subclasses may override
        logger.info("plot_generation not implemented")

    def plot_progress(self, gen_nbr: int, fig_name: str, baseline_value: float | None = None):
        """
        **Plots** and **saves** the overall progress of the optimization.

        Note:
            this method is meant to be called in `final_observe`.
        """
        logger.info(f"plotting populations statistics after {gen_nbr} generations..")

        # plot construction
        _, ax = plt.subplots(figsize=(8, 8))
        psize = self.doe_size
        if baseline_value:
            ax.axhline(y=baseline_value, color='k', ls="--", label="baseline")

        # plotting data
        # "best"/"worst" depend on whether the objective is maximized or minimized
        best = self.max if self.maximize else self.min
        worst = self.min if self.maximize else self.max
        data = [self.mean, self.median, best, worst]
        colors = ["grey", "blue", "green", "red"]
        labels = ["mean", "median", "best", "worst"]
        for val, col, lab in zip(data, colors, labels):
            ax.plot(range(self.gen_ctr), val, color=col, label=lab)
        # shade the band between the best and worst fitness curves
        plt.fill_between(range(self.gen_ctr), data[2], data[3], color='#e6f2e6')
        plt.grid(True)
        ymin = min([min(d) for d in data])
        ymax = max([max(d) for d in data])
        yrange = ymax - ymin
        plt.ylim((ymin - 0.1 * yrange, ymax + 0.1 * yrange))
        ax.xaxis.set_major_locator(MaxNLocator(integer=True))
        # legend and title
        ax.set_title(f"Optimization evolution ({gen_nbr} g. x {psize} c.)")
        ax.legend(loc="center left", bbox_to_anchor=(1, 0.5))
        ax.set_xlabel('generation $[\\cdot]$')
        ax.set_ylabel('fitness')

        # save figure as png
        logger.info(f"saving {fig_name} to {self.outdir}")
        plt.savefig(os.path.join(self.outdir, fig_name), bbox_inches='tight')
        plt.close()

    def save_results(self):
        """
        **Saves** candidates and fitnesses to file.
        """
        logger.info(f"optimization results saved to {self.outdir}")
        np.savetxt(
            os.path.join(self.outdir, "candidates.txt"),
            np.reshape(self.inputs, (-1, self.n_design))
        )
        np.savetxt(os.path.join(self.outdir, "fitnesses.txt"), self.J)

    @abstractmethod
    def set_simulator_class(self):
        """
        Instantiates the simulator class with CustomSimulator if found.
        """
        # resolves a custom simulator class if configured; otherwise leaves
        # SimulatorClass as None so subclasses can substitute a default class
        self.SimulatorClass = (
            get_custom_class(self.custom_file, "CustomSimulator") if self.custom_file else None
        )

    @abstractmethod
    def _evaluate(self, *args, **kwargs) -> list[float | list[float]] | None:
        """
        Computes all candidates outputs and return the optimizer list of QoIs.
        """

__init__(config: dict, debug: bool = False)

Instantiates the Optimizer object.

Input

  • config (dict): the config file dictionary.
  • debug (bool): skip FFD and Mesh objects instantiation for debugging purposes.

Inner

  • n_design (int): the number of design variables (dimensions of the problem).
  • doe_size (int): the size of the initial and subsequent generations.
  • max_generations (int): the number of generations before termination.
  • dat_file (str): path to input_geometry.dat (baseline geometry).
  • outdir (str): highest level optimization output directory.
Note

the result folder tree is structured as follows:

outdir
|__ FFD (contains <geom>_gXX_cYY.dat)
|__ Figs (contains the figures generated during the optimization)
|__ MESH (contains <geom>_gXX_cYY.mesh, .log, .geo_unrolled)
|__ SOLVER
    |__ solver_gXX_cYY (contains the results of each simulation)

  • study_type (str): use-case/meshing routine.
  • ffd_type (str): deformation method.
  • strategy (str): the optimization algorithm amongst inspyred's [ES, PSO] and pymoo's [GA, PSO]
    see https://pythonhosted.org/inspyred/examples.html#standard-algorithms
    and https://pymoo.org/algorithms/list.html#nb-algorithms-list

  • maximize (bool): whether to maximize or minimize the objective QoIs.

  • budget (int): maximum number of concurrent proc in use.
  • nproc_per_sim (int): number of proc per simulation.
  • bound (tuple[float]): design variables boundaries.
  • custom_doe (str): path to a custom doe.
  • sampler_name (str): name of the sampling algorithm used to generate samples for the initial generation.
  • seed (int): seed number of the random processes involved in the optimization.
  • prng (random.Random): pseudo-random generator passed to inspyred generator.
  • ea_kwargs (dict): additional arguments to be passed to the evolution algorithm.
  • gen_ctr (int): generation counter.
  • generator (Generator): Generator object for the initial generation sampling.
  • ffd (FFD_2D): FFD_2D object to generate deformed geometries.
  • gmsh_mesh (Mesh): Mesh class to generate deformed geometries meshes.
  • simulator (Simulator): Simulator object to perform simulations.
  • mean (list[float]): list of populations mean fitness.
  • median (list[float]): list of populations median fitness.
  • max (list[float]): list of populations max fitness.
  • min (list[float]): list of populations min fitness.
  • J (list[float | list[float]]): the list of all generated candidates fitnesses.
  • inputs (list[list[np.ndarray]]): all input candidates.
  • ffd_profiles (list[list[np.ndarray]]): all deformed geometries {gid: {cid: ffd_profile}}.
  • QoI (str): the quantity of interest to minimize/maximize.
  • n_plt (int): the number of best candidates results to display after each evaluation.
  • cmap (str): the colormaps used for the observer plot
    see https://matplotlib.org/stable/users/explain/colors/colormaps.html.
Source code in aero_optim/optim/optimizer.py
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
def __init__(self, config: dict, debug: bool = False):
    """
    Instantiates the Optimizer object.

    **Input**

    - config (dict): the config file dictionary.
    - debug (bool): skip FFD and Mesh objects instantiation for debugging purposes.

    **Inner**

    - n_design (int): the number of design variables (dimensions of the problem).
    - doe_size (int): the size of the initial and subsequent generations.
    - max_generations (int): the number of generations before termination.
    - dat_file (str): path to input_geometry.dat (baseline geometry).
    - outdir (str): highest level optimization output directory.

    Note:
        the result folder tree is structured as follows:
        ```
        outdir
        |__ FFD (contains <geom>_gXX_cYY.dat)
        |__ Figs (contains the figures generated during the optimization)
        |__ MESH (contains <geom>_gXX_cYY.mesh, .log, .geo_unrolled)
        |__ SOLVER
            |__ solver_gXX_cYY (contains the results of each simulation)
        ```

    - study_type (str): use-case/meshing routine.
    - ffd_type (str): deformation method.
    - strategy (str): the optimization algorithm amongst inspyred's [ES, PSO]
        and pymoo's [GA, PSO]</br>
        see https://pythonhosted.org/inspyred/examples.html#standard-algorithms</br>
        and https://pymoo.org/algorithms/list.html#nb-algorithms-list

    - maximize (bool): whether to maximize or minimize the objective QoIs.
    - budget (int): maximum number of concurrent proc in use.
    - nproc_per_sim (int): number of proc per simulation.
    - bound (tuple[float]): design variables boundaries.
    - custom_doe (str): path to a custom doe.
    - sampler_name (str): name of the sampling algorithm used to generate samples of
      the initial generation.
    - seed (int): seed number of the random processes involved in the optimization.
    - prng (random.Random): pseudo-random generator passed to inspyred generator.
    - ea_kwargs (dict): additional arguments to be passed to the evolution algorithm.
    - gen_ctr (int): generation counter.
    - generator (Generator): Generator object for the initial generation sampling.
    - ffd (FFD_2D): FFD_2D object to generate deformed geometries.
    - gmsh_mesh (Mesh): Mesh class to generate deformed geometries meshes.
    - simulator (Simulator): Simulator object to perform simulations.
    - mean (list[float]): list of populations mean fitness.
    - median (list[float]): list of populations median fitness.
    - max (list[float]): list of populations max fitness.
    - min (list[float]): list of populations min fitness.
    - J (list[float | list[float]]): the list of all generated candidates fitnesses.
    - inputs (list[list[np.ndarray]]): all input candidates.
    - ffd_profiles (list[list[np.ndarray]]): all deformed geometries {gid: {cid: ffd_profile}}.
    - QoI (str): the quantity of interest to minimize/maximize.
    - n_plt (int): the number of best candidates results to display after each evaluation.
    - cmap (str): the colormaps used for the observer plot</br>
        see https://matplotlib.org/stable/users/explain/colors/colormaps.html.
    """
    self.config = config
    self.process_config()
    # required entries
    self.n_design: int = config["optim"]["n_design"]
    self.doe_size: int = config["optim"]["doe_size"]
    self.max_generations: int = config["optim"]["max_generations"]
    self.dat_file: str = config["study"]["file"]
    self.outdir: str = config["study"]["outdir"]
    self.study_type: str = config["study"]["study_type"]
    # optional entries
    self.ffd_type: str = config["study"].get("ffd_type", "")
    self.custom_file: str = config["study"].get("custom_file", "")
    self.strategy: str = config["optim"].get("strategy", "PSO")
    self.maximize: bool = config["optim"].get("maximize", False)
    self.budget: int = config["optim"].get("budget", 4)
    self.nproc_per_sim: int = config["optim"].get("nproc_per_sim", 1)
    self.bound: tuple[Any, ...] = tuple(config["optim"].get("bound", [-1, 1]))
    self.custom_doe: str = config["optim"].get("custom_doe", "")
    self.sampler_name: str = config["optim"].get("sampler_name", "lhs")
    self.ea_kwargs: dict = config["optim"].get("ea_kwargs", {})
    # reproducibility variables
    self.seed: int = config["optim"].get("seed", 123)
    self.prng: Random = Random()
    self.prng.seed(self.seed)
    # generation counter
    self.gen_ctr: int = 0
    # optimization objects
    if not debug:
        self.set_ffd_class()
        self.set_gmsh_mesh_class()
    self.generator: Generator = Generator(
        self.seed, self.n_design, self.doe_size, self.sampler_name, self.bound, self.custom_doe
    )
    self.set_simulator_class()
    self.simulator = self.SimulatorClass(self.config)
    # population statistics
    self.mean: list[float] = []
    self.median: list[float] = []
    self.max: list[float] = []
    self.min: list[float] = []
    # set other inner optimization variables
    self.J: list[float | list[float]] = []
    self.inputs: list[list[np.ndarray]] = []
    self.ffd_profiles: list[list[np.ndarray]] = []
    self.QoI: str = self.config["optim"].get("QoI", "CD")
    self.n_plt: int = self.config["optim"].get("n_plt", 5)
    self.cmap: str = self.config["optim"].get("cmap", "viridis")
    self.set_inner()
    # figure directory
    self.figdir: str = os.path.join(self.outdir, "Figs")
    check_dir(self.figdir)

_evaluate(*args, **kwargs) -> list[float | list[float]] | None abstractmethod

Computes all candidates outputs and return the optimizer list of QoIs.

Source code in aero_optim/optim/optimizer.py
381
382
383
384
385
@abstractmethod
def _evaluate(self, *args, **kwargs) -> list[float | list[float]] | None:
    """
    Computes all candidates outputs and return the optimizer list of QoIs.
    """

_observe(*args, **kwargs)

Plots generation data after each evaluation.

Source code in aero_optim/optim/optimizer.py
298
299
300
301
302
def _observe(self, *args, **kwargs):
    """
    **Plots** generation data after each evaluation.
    """
    logger.info("_observe not implemented")

compute_statistics(gen_fitness: np.ndarray)

Computes generation statistics.

Note

this method is meant to be called in _observe.

Source code in aero_optim/optim/optimizer.py
286
287
288
289
290
291
292
293
294
295
296
def compute_statistics(self, gen_fitness: np.ndarray):
    """
    **Computes** generation statistics.

    Note:
        this method is meant to be called in `_observe`.
    """
    self.mean.append(np.mean(gen_fitness))
    self.median.append(np.median(gen_fitness))
    self.min.append(min(gen_fitness))
    self.max.append(max(gen_fitness))

deform(Delta: np.ndarray, gid: int, cid: int) -> tuple[str, np.ndarray]

Applies FFD on a given candidate and returns its resulting file.

Source code in aero_optim/optim/optimizer.py
234
235
236
237
238
239
240
241
242
def deform(self, Delta: np.ndarray, gid: int, cid: int) -> tuple[str, np.ndarray]:
    """
    **Applies** FFD on a given candidate and returns its resulting file.
    """
    ffd_dir = os.path.join(self.outdir, "FFD")
    check_dir(ffd_dir)
    logger.info(f"g{gid}, c{cid} generate profile with deformation {Delta}")
    profile: np.ndarray = self.ffd.apply_ffd(Delta)
    return self.ffd.write_ffd(profile, Delta, ffd_dir, gid=gid, cid=cid), profile

execute_candidates(candidates: list[Individual] | np.ndarray, gid: int)

Executes all candidates and waits for them to finish.

Note

this method is meant to be called in _evaluate.

Source code in aero_optim/optim/optimizer.py
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
def execute_candidates(self, candidates: list[Individual] | np.ndarray, gid: int):
    """
    **Executes** all candidates and **waits** for them to finish.

    Note:
        this method is meant to be called in _evaluate.
    """
    logger.info(f"evaluating candidates of generation {self.gen_ctr}..")
    self.ffd_profiles.append([])
    self.inputs.append([])
    for cid, cand in enumerate(candidates):
        self.inputs[gid].append(np.array(cand))
        ffd_file, ffd_profile = self.deform(cand, gid, cid)
        self.ffd_profiles[gid].append(ffd_profile)
        # meshing with proper sigint management
        # see https://gitlab.onelab.info/gmsh/gmsh/-/issues/842
        ORIGINAL_SIGINT_HANDLER = signal.signal(signal.SIGINT, signal.SIG_DFL)
        mesh_file = self.mesh(ffd_file)
        signal.signal(signal.SIGINT, ORIGINAL_SIGINT_HANDLER)
        while self.simulator.monitor_sim_progress() * self.nproc_per_sim >= self.budget:
            time.sleep(1)
        self.simulator.execute_sim(meshfile=mesh_file, gid=gid, cid=cid)

    # wait for last candidates to finish
    while self.simulator.monitor_sim_progress() > 0:
        time.sleep(0.1)

mesh(ffdfile: str) -> str

Builds mesh for a given candidate and returns its resulting file.

Note

if a mesh file matching the pattern name already exists, it is not rebuilt.

Source code in aero_optim/optim/optimizer.py
244
245
246
247
248
249
250
251
252
253
254
255
256
257
def mesh(self, ffdfile: str) -> str:
    """
    **Builds** mesh for a given candidate and returns its resulting file.

    Note:
        if a mesh file matching the pattern name already exists, it is not rebuilt.
    """
    mesh_dir = os.path.join(self.outdir, "MESH")
    check_dir(mesh_dir)
    gmsh_mesh = self.MeshClass(self.config, ffdfile)
    if os.path.isfile(gmsh_mesh.get_meshfile(mesh_dir)):
        return gmsh_mesh.get_meshfile(mesh_dir)
    gmsh_mesh.build_mesh()
    return gmsh_mesh.write_mesh(mesh_dir)

plot_generation(gid: int, sorted_idx: np.ndarray, gen_fitness: np.ndarray, fig_name: str)

Plots the results of the last evaluated generation. Saves the graph in the output directory.

Note

this method is meant to be called in _observe.

Source code in aero_optim/optim/optimizer.py
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
def plot_generation(
        self,
        gid: int,
        sorted_idx: np.ndarray,
        gen_fitness: np.ndarray,
        fig_name: str
):
    """
    **Plots** the results of the last evaluated generation.
    **Saves** the graph in the output directory.

    Note:
        this method is meant to be called in `_observe`.
    """
    logger.info("plot_generation not implemented")

plot_progress(gen_nbr: int, fig_name: str, baseline_value: float | None = None)

Plots and saves the overall progress of the optimization.

Note

this method is meant to be called in final_observe.

Source code in aero_optim/optim/optimizer.py
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
def plot_progress(self, gen_nbr: int, fig_name: str, baseline_value: float | None = None):
    """
    **Plots** and **saves** the overall progress of the optimization.

    Note:
        this method is meant to be called in `final_observe`.
    """
    logger.info(f"plotting populations statistics after {gen_nbr} generations..")

    # plot construction
    _, ax = plt.subplots(figsize=(8, 8))
    psize = self.doe_size
    if baseline_value:
        ax.axhline(y=baseline_value, color='k', ls="--", label="baseline")

    # plotting data
    best = self.max if self.maximize else self.min
    worst = self.min if self.maximize else self.max
    data = [self.mean, self.median, best, worst]
    colors = ["grey", "blue", "green", "red"]
    labels = ["mean", "median", "best", "worst"]
    for val, col, lab in zip(data, colors, labels):
        ax.plot(range(self.gen_ctr), val, color=col, label=lab)
    plt.fill_between(range(self.gen_ctr), data[2], data[3], color='#e6f2e6')
    plt.grid(True)
    ymin = min([min(d) for d in data])
    ymax = max([max(d) for d in data])
    yrange = ymax - ymin
    plt.ylim((ymin - 0.1 * yrange, ymax + 0.1 * yrange))
    ax.xaxis.set_major_locator(MaxNLocator(integer=True))
    # legend and title
    ax.set_title(f"Optimization evolution ({gen_nbr} g. x {psize} c.)")
    ax.legend(loc="center left", bbox_to_anchor=(1, 0.5))
    ax.set_xlabel('generation $[\\cdot]$')
    ax.set_ylabel('fitness')

    # save figure as png
    logger.info(f"saving {fig_name} to {self.outdir}")
    plt.savefig(os.path.join(self.outdir, fig_name), bbox_inches='tight')
    plt.close()

process_config()

Makes sure the config file contains the required information.

Source code in aero_optim/optim/optimizer.py
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
def process_config(self):
    """
    **Makes sure** the config file contains the required information.
    """
    logger.info("processing config..")
    if "n_design" not in self.config["optim"]:
        raise Exception(f"ERROR -- no <n_design> entry in {self.config['optim']}")
    if "doe_size" not in self.config["optim"]:
        raise Exception(f"ERROR -- no <doe_size> entry in {self.config['optim']}")
    if "max_generations" not in self.config["optim"]:
        raise Exception(f"ERROR -- no <max_generations> entry in {self.config['optim']}")
    if "file" not in self.config["study"]:
        raise Exception(f"ERROR -- no <file> entry in {self.config['study']}")
    if "budget" not in self.config["optim"]:
        logger.warning(f"no <budget> entry in {self.config['optim']}")
    if "nproc_per_sim" not in self.config["optim"]:
        logger.warning(f"no <nproc_per_sim> entry in {self.config['optim']}")
    if "bound" not in self.config["optim"]:
        logger.warning(f"no <bound> entry in {self.config['optim']}")
    if "sampler_name" not in self.config["optim"]:
        logger.warning(f"no <sampler_name> entry in {self.config['optim']}")
    if "seed" not in self.config["optim"]:
        logger.warning(f"no <seed> entry in {self.config['optim']}")
    #  alter config for optimization purposes
    if "outfile" in self.config["study"]:
        logger.warning(f"<outfile> entry in {self.config['study']} will be ignored")
        del self.config["study"]["outfile"]
    if "view" in self.config["gmsh"] and "GUI" in self.config["gmsh"]["view"]:
        logger.warning(
            f"<GUI> entry in {self.config['gmsh']['view']} forced to False"
        )
        self.config["gmsh"]["view"]["GUI"] = False

save_results()

Saves candidates and fitnesses to file.

Source code in aero_optim/optim/optimizer.py
361
362
363
364
365
366
367
368
369
370
def save_results(self):
    """
    **Saves** candidates and fitnesses to file.
    """
    logger.info(f"optimization results saved to {self.outdir}")
    np.savetxt(
        os.path.join(self.outdir, "candidates.txt"),
        np.reshape(self.inputs, (-1, self.n_design))
    )
    np.savetxt(os.path.join(self.outdir, "fitnesses.txt"), self.J)

set_ffd_class()

Instantiates the deformation class and object as custom if found, as one of the default classes otherwise.

Source code in aero_optim/optim/optimizer.py
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
def set_ffd_class(self):
    """
    **Instantiates** the deformation class and object as custom if found,
    as one of the default classes otherwise.
    """
    self.FFDClass = (
        get_custom_class(self.custom_file, "CustomFFD") if self.custom_file else None
    )
    if not self.FFDClass:
        if self.ffd_type == FFD_TYPE[0]:
            self.FFDClass = FFD_2D
            self.ffd = self.FFDClass(self.dat_file, self.n_design // 2)
        elif self.ffd_type == FFD_TYPE[1]:
            self.FFDClass = FFD_POD_2D
            self.config["ffd"]["ffd_ncontrol"] = self.n_design
            self.config["ffd"]["ffd_bound"] = self.bound
            logger.info(f"ffd bound: {self.bound}")
            self.ffd = self.FFDClass(self.dat_file, **self.config["ffd"])
            self.n_design = self.config["ffd"]["pod_ncontrol"]
            self.bound = self.config["ffd"].get("pod_bound", self.ffd.get_bound())
            logger.info(f"pod bound: {self.bound}")
        else:
            raise Exception(f"ERROR -- incorrect ffd_type <{self.ffd_type}>")
    else:
        self.ffd = self.FFDClass(self.dat_file, self.n_design, **self.config["ffd"])

set_gmsh_mesh_class()

Instantiates the mesher class as custom if found, as one of the default meshers otherwise.

Source code in aero_optim/optim/optimizer.py
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
def set_gmsh_mesh_class(self):
    """
    **Instantiates** the mesher class as custom if found,
    as one of the default meshers otherwise.
    """
    self.MeshClass = (
        get_custom_class(self.custom_file, "CustomMesh") if self.custom_file else None
    )
    if not self.MeshClass:
        if self.study_type == STUDY_TYPE[0]:
            self.MeshClass = NACABaseMesh
        elif self.study_type == STUDY_TYPE[1]:
            self.MeshClass = NACABlockMesh
        elif self.study_type == STUDY_TYPE[2]:
            self.MeshClass = CascadeMesh
        else:
            raise Exception(f"ERROR -- incorrect study_type <{self.study_type}>")

set_inner()

Sets some use-case specific inner variables:

Source code in aero_optim/optim/optimizer.py
228
229
230
231
232
def set_inner(self):
    """
    **Sets** some use-case specific inner variables:
    """
    logger.info("set_inner not implemented")

set_simulator_class() abstractmethod

Instantiates the simulator class with CustomSimulator if found.

Source code in aero_optim/optim/optimizer.py
372
373
374
375
376
377
378
379
@abstractmethod
def set_simulator_class(self):
    """
    Instantiates the simulator class with CustomSimulator if found.
    """
    self.SimulatorClass = (
        get_custom_class(self.custom_file, "CustomSimulator") if self.custom_file else None
    )

optim.optimizer.WolfOptimizer

Bases: Optimizer, ABC

This class implements a Wolf based Optimizer.

Source code in aero_optim/optim/optimizer.py
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
class WolfOptimizer(Optimizer, ABC):
    """
    This class implements a Wolf based Optimizer.
    """
    def __init__(self, config: dict):
        """
        Instantiates the WolfOptimizer object.

        **Input**

        - config (dict): the config file dictionary.
        """
        super().__init__(config)

    def set_simulator_class(self):
        """
        **Sets** the simulator class as custom if found, as WolfSimulator otherwise.
        """
        super().set_simulator_class()
        if not self.SimulatorClass:
            self.SimulatorClass = WolfSimulator

    def set_inner(self):
        """
        **Sets** some use-case specific inner variables:

        - baseline_CD (float): the drag coefficient of the baseline geometry.
        - baseline_CL (float): the lift coefficient of the baseline geometry.
        - baseline_area (float): the baseline area that is used as a structural constraint.
        - area_margin (float): area tolerance margin given as a percentage wrt baseline_area</br>
            i.e. a candidate with an area greater/smaller than +/- area_margin % of the
            baseline_area will be penalized.
        - penalty (list): a [key, value] constraint not to be worsened by the optimization.
        - constraint (bool): constraints are applied (True) or not (False)
        """
        self.baseline_CD: float = self.config["optim"].get("baseline_CD", 0.15)
        self.baseline_CL: float = self.config["optim"].get("baseline_CL", 0.36)
        self.baseline_area: float = abs(get_area(self.ffd.pts))
        self.area_margin: float = self.config["optim"].get("area_margin", 40.) / 100.
        self.penalty: list = self.config["optim"].get("penalty", ["CL", self.baseline_CL])
        self.constraint: bool = self.config["optim"].get("constraint", True)

    def plot_generation(
            self,
            gid: int,
            sorted_idx: np.ndarray,
            gen_fitness: np.ndarray,
            fig_name: str
    ):
        """
        **Plots** the results of the last evaluated generation.
        **Saves** the graph in the output directory.
        """
        baseline: np.ndarray = self.ffd.pts
        profiles: list[np.ndarray] = self.ffd_profiles[gid]
        res_dict = self.simulator.df_dict[gid]
        df_key = res_dict[0].columns  # "ResTot", "CD", "CL", "ResCD", "ResCL", "x", "y", "Cp"

        cmap = mpl.colormaps[self.cmap].resampled(self.n_plt)
        colors = cmap(np.linspace(0, 1, self.n_plt))
        # subplot construction
        fig = plt.figure(figsize=(16, 12))
        ax1 = plt.subplot(2, 1, 1)  # profiles
        ax2 = plt.subplot(2, 3, 4)  # ResTot
        ax3 = plt.subplot(2, 3, 5)  # CD & CL
        ax4 = plt.subplot(2, 3, 6)  # fitness (CD)
        plt.subplots_adjust(wspace=0.25)
        ax1.plot(baseline[:, 0], baseline[:, 1], color="k", lw=2, ls="--", label="baseline")
        ax3.axhline(y=self.baseline_CD, color='k', label="baseline")
        ax3.axhline(y=self.baseline_CL, color='k', linestyle="--", label="baseline")
        ax4.axhline(y=self.baseline_CD, color='k', linestyle="--", label="baseline")
        # loop over candidates through the last generated profiles
        for color, cid in enumerate(sorted_idx):
            ax1.plot(profiles[cid][:, 0], profiles[cid][:, 1], color=colors[color], label=f"c{cid}")
            res_dict[cid][df_key[0]].plot(ax=ax2, color=colors[color], label=f"c{cid}")  # ResTot
            res_dict[cid][df_key[1]].plot(ax=ax3, color=colors[color], label=f"{df_key[1]} c{cid}")
            res_dict[cid][df_key[2]].plot(
                ax=ax3, color=colors[color], ls="--", label=f"{df_key[2]} c{cid}"
            )
            ax4.scatter(cid, gen_fitness[cid], color=colors[color], label=f"c{cid}")
        # legend and title
        fig.suptitle(
            f"Generation {gid} - {self.n_plt} top candidates", size="x-large", weight="bold", y=0.93
        )
        # top
        ax1.set_title("FFD profiles", weight="bold")
        ax1.legend(loc="center left", bbox_to_anchor=(1, 0.5))
        ax1.set_xlabel('$x$ $[m]$')
        ax1.set_ylabel('$y$ $[m]$')
        # bottom left
        ax2.set_title(f"{df_key[0]}", weight="bold")
        ax2.set_yscale("log")
        ax2.set_xlabel('iteration $[\\cdot]$')
        ax2.set_ylabel('residual $[\\cdot]$')
        # bottom center
        ax3.set_title(f"{df_key[1]} & {df_key[2]}", weight="bold")
        ax3.set_xlabel('iteration $[\\cdot]$')
        ax3.set_ylabel('aerodynamic coefficients $[\\cdot]$')
        # bottom right
        ax4.xaxis.set_major_locator(MaxNLocator(integer=True))
        ax4.set_title(f"fitness: {self.QoI}", weight="bold")
        ax4.legend(loc="center left", bbox_to_anchor=(1, 0.5))
        ax4.set_xlabel('candidate $[\\cdot]$')
        ax4.set_ylabel("fitness")
        # save figure as png
        logger.info(f"saving {fig_name} to {self.outdir}")
        plt.savefig(os.path.join(self.figdir, fig_name), bbox_inches='tight')
        plt.close()

    def save_results(self):
        super().save_results()
        with open(os.path.join(self.outdir, "df_dict.pkl"), "wb") as handle:
            pickle.dump(self.simulator.df_dict, handle)
        logger.info(f"results dictionary saved to {self.outdir}")

    @abstractmethod
    def apply_constraints(self, *args, **kwargs):
        """
        Looks for constraints violations.
        """

    @abstractmethod
    def final_observe(self, *args, **kwargs):
        """
        Plots convergence progress by plotting the fitness values
        obtained with the successive generations.
        """

__init__(config: dict)

Instantiates the WolfOptimizer object.

Input

  • config (dict): the config file dictionary.
Source code in aero_optim/optim/optimizer.py
392
393
394
395
396
397
398
399
400
def __init__(self, config: dict):
    """
    Instantiates the WolfOptimizer object.

    **Input**

    - config (dict): the config file dictionary.
    """
    super().__init__(config)

apply_constraints(*args, **kwargs) abstractmethod

Looks for constraints violations.

Source code in aero_optim/optim/optimizer.py
503
504
505
506
507
@abstractmethod
def apply_constraints(self, *args, **kwargs):
    """
    Looks for constraints violations.
    """

final_observe(*args, **kwargs) abstractmethod

Plots convergence progress by plotting the fitness values obtained with the successive generations.

Source code in aero_optim/optim/optimizer.py
509
510
511
512
513
514
@abstractmethod
def final_observe(self, *args, **kwargs):
    """
    Plots convergence progress by plotting the fitness values
    obtained with the successive generations.
    """

plot_generation(gid: int, sorted_idx: np.ndarray, gen_fitness: np.ndarray, fig_name: str)

Plots the results of the last evaluated generation. Saves the graph in the output directory.

Source code in aero_optim/optim/optimizer.py
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
def plot_generation(
        self,
        gid: int,
        sorted_idx: np.ndarray,
        gen_fitness: np.ndarray,
        fig_name: str
):
    """
    **Plots** the results of the last evaluated generation.
    **Saves** the graph in the output directory.
    """
    baseline: np.ndarray = self.ffd.pts
    profiles: list[np.ndarray] = self.ffd_profiles[gid]
    res_dict = self.simulator.df_dict[gid]
    df_key = res_dict[0].columns  # "ResTot", "CD", "CL", "ResCD", "ResCL", "x", "y", "Cp"

    cmap = mpl.colormaps[self.cmap].resampled(self.n_plt)
    colors = cmap(np.linspace(0, 1, self.n_plt))
    # subplot construction
    fig = plt.figure(figsize=(16, 12))
    ax1 = plt.subplot(2, 1, 1)  # profiles
    ax2 = plt.subplot(2, 3, 4)  # ResTot
    ax3 = plt.subplot(2, 3, 5)  # CD & CL
    ax4 = plt.subplot(2, 3, 6)  # fitness (CD)
    plt.subplots_adjust(wspace=0.25)
    ax1.plot(baseline[:, 0], baseline[:, 1], color="k", lw=2, ls="--", label="baseline")
    ax3.axhline(y=self.baseline_CD, color='k', label="baseline")
    ax3.axhline(y=self.baseline_CL, color='k', linestyle="--", label="baseline")
    ax4.axhline(y=self.baseline_CD, color='k', linestyle="--", label="baseline")
    # loop over candidates through the last generated profiles
    for color, cid in enumerate(sorted_idx):
        ax1.plot(profiles[cid][:, 0], profiles[cid][:, 1], color=colors[color], label=f"c{cid}")
        res_dict[cid][df_key[0]].plot(ax=ax2, color=colors[color], label=f"c{cid}")  # ResTot
        res_dict[cid][df_key[1]].plot(ax=ax3, color=colors[color], label=f"{df_key[1]} c{cid}")
        res_dict[cid][df_key[2]].plot(
            ax=ax3, color=colors[color], ls="--", label=f"{df_key[2]} c{cid}"
        )
        ax4.scatter(cid, gen_fitness[cid], color=colors[color], label=f"c{cid}")
    # legend and title
    fig.suptitle(
        f"Generation {gid} - {self.n_plt} top candidates", size="x-large", weight="bold", y=0.93
    )
    # top
    ax1.set_title("FFD profiles", weight="bold")
    ax1.legend(loc="center left", bbox_to_anchor=(1, 0.5))
    ax1.set_xlabel('$x$ $[m]$')
    ax1.set_ylabel('$y$ $[m]$')
    # bottom left
    ax2.set_title(f"{df_key[0]}", weight="bold")
    ax2.set_yscale("log")
    ax2.set_xlabel('iteration $[\\cdot]$')
    ax2.set_ylabel('residual $[\\cdot]$')
    # bottom center
    ax3.set_title(f"{df_key[1]} & {df_key[2]}", weight="bold")
    ax3.set_xlabel('iteration $[\\cdot]$')
    ax3.set_ylabel('aerodynamic coefficients $[\\cdot]$')
    # bottom right
    ax4.xaxis.set_major_locator(MaxNLocator(integer=True))
    ax4.set_title(f"fitness: {self.QoI}", weight="bold")
    ax4.legend(loc="center left", bbox_to_anchor=(1, 0.5))
    ax4.set_xlabel('candidate $[\\cdot]$')
    ax4.set_ylabel("fitness")
    # save figure as png
    logger.info(f"saving {fig_name} to {self.outdir}")
    plt.savefig(os.path.join(self.figdir, fig_name), bbox_inches='tight')
    plt.close()

set_inner()

Sets some use-case specific inner variables:

  • baseline_CD (float): the drag coefficient of the baseline geometry.
  • baseline_CL (float): the lift coefficient of the baseline geometry.
  • baseline_area (float): the baseline area that is used as a structural constraint.
  • area_margin (float): area tolerance margin given as a percentage wrt baseline_area
    i.e. a candidate with an area greater/smaller than +/- area_margin % of the baseline_area will be penalized.
  • penalty (list): a [key, value] constraint not to be worsened by the optimization.
  • constraint (bool): constraints are applied (True) or not (False)
Source code in aero_optim/optim/optimizer.py
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
def set_inner(self):
    """
    **Sets** some use-case specific inner variables:

    - baseline_CD (float): the drag coefficient of the baseline geometry.
    - baseline_CL (float): the lift coefficient of the baseline geometry.
    - baseline_area (float): the baseline area that is used as a structural constraint.
    - area_margin (float): area tolerance margin given as a percentage wrt baseline_area</br>
        i.e. a candidate with an area greater/smaller than +/- area_margin % of the
        baseline_area will be penalized.
    - penalty (list): a [key, value] constraint not to be worsened by the optimization.
    - constraint (bool): constraints are applied (True) or not (False)
    """
    self.baseline_CD: float = self.config["optim"].get("baseline_CD", 0.15)
    self.baseline_CL: float = self.config["optim"].get("baseline_CL", 0.36)
    self.baseline_area: float = abs(get_area(self.ffd.pts))
    self.area_margin: float = self.config["optim"].get("area_margin", 40.) / 100.
    self.penalty: list = self.config["optim"].get("penalty", ["CL", self.baseline_CL])
    self.constraint: bool = self.config["optim"].get("constraint", True)

set_simulator_class()

Sets the simulator class as custom if found, as WolfSimulator otherwise.

Source code in aero_optim/optim/optimizer.py
402
403
404
405
406
407
408
def set_simulator_class(self):
    """
    **Sets** the simulator class as custom if found, as WolfSimulator otherwise.
    """
    super().set_simulator_class()
    if not self.SimulatorClass:
        self.SimulatorClass = WolfSimulator

optim.optimizer.DebugOptimizer

Bases: Optimizer, ABC

Source code in aero_optim/optim/optimizer.py
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
class DebugOptimizer(Optimizer, ABC):
    def __init__(self, config: dict):
        """
        Dummy init.
        """
        super().__init__(config, debug=True)

    def set_simulator_class(self):
        """
        **Sets** the simulator class as custom if found, as DebugSimulator otherwise.
        """
        super().set_simulator_class()
        if not self.SimulatorClass:
            self.SimulatorClass = DebugSimulator

    def set_inner(self):
        return

    def execute_candidates(self, candidates: list[Individual] | np.ndarray, gid: int):
        """
        **Executes** all candidates and **waits** for them to finish.
        """
        logger.info(f"evaluating candidates of generation {self.gen_ctr}..")
        self.inputs.append([])
        for cid, cand in enumerate(candidates):
            self.inputs[gid].append(np.array(cand))
            logger.debug(f"g{gid}, c{cid} cand {cand}")
            self.simulator.execute_sim(cand, gid, cid)
            logger.debug(f"g{gid}, c{cid} cand {cand}, "
                         f"fitness {self.simulator.df_dict[gid][cid]['result'].iloc[-1]}")

__init__(config: dict)

Dummy init.

Source code in aero_optim/optim/optimizer.py
518
519
520
521
522
def __init__(self, config: dict):
    """
    Dummy init.

    - config (dict): the config file dictionary, forwarded to the parent Optimizer.
    """
    # debug=True switches the parent Optimizer to its debug behavior
    super().__init__(config, debug=True)

execute_candidates(candidates: list[Individual] | np.ndarray, gid: int)

Executes all candidates and waits for them to finish.

Source code in aero_optim/optim/optimizer.py
535
536
537
538
539
540
541
542
543
544
545
546
def execute_candidates(self, candidates: list[Individual] | np.ndarray, gid: int):
    """
    **Executes** all candidates and **waits** for them to finish.

    - candidates: the generation's candidates, one design vector each.
    - gid (int): the generation index used to store inputs and results.
    """
    logger.info(f"evaluating candidates of generation {self.gen_ctr}..")
    # open a new input slot for this generation
    self.inputs.append([])
    for cid, cand in enumerate(candidates):
        self.inputs[gid].append(np.array(cand))
        logger.debug(f"g{gid}, c{cid} cand {cand}")
        self.simulator.execute_sim(cand, gid, cid)
        logger.debug(f"g{gid}, c{cid} cand {cand}, "
                     f"fitness {self.simulator.df_dict[gid][cid]['result'].iloc[-1]}")

set_simulator_class()

Sets the simulator class as custom if found, as DebugSimulator otherwise.

Source code in aero_optim/optim/optimizer.py
524
525
526
527
528
529
530
def set_simulator_class(self):
    """
    **Sets** the simulator class as custom if found, as DebugSimulator otherwise.
    """
    # the parent implementation may leave SimulatorClass unset (falsy)
    # when no custom simulator class is resolved; fall back to DebugSimulator
    super().set_simulator_class()
    if not self.SimulatorClass:
        self.SimulatorClass = DebugSimulator

inspyred Optimizers

optim.inspyred_optimizer.InspyredWolfOptimizer

Bases: WolfOptimizer

This class implements a Wolf based Optimizer.

Source code in aero_optim/optim/inspyred_optimizer.py
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
class InspyredWolfOptimizer(WolfOptimizer):
    """
    This class implements a Wolf based Optimizer.
    """
    def _evaluate(self, candidates: list[Individual], args: dict) -> list[float | list[float]]:
        """
        **Executes** Wolf simulations, **extracts** results
        and **returns** the list of candidates QoIs.

        Note:
            __candidates__ and __args__ are inspyred mandatory arguments</br>
            see https://pythonhosted.org/inspyred/tutorial.html#the-evaluator
        """
        gid = self.gen_ctr

        # execute all candidates
        self.execute_candidates(candidates, gid)

        # add penalty to the candidates fitness
        for cid, _ in enumerate(candidates):
            # fitness = constraint penalty + quantity of interest (last iteration value)
            self.J.append(
                self.apply_constraints(
                    gid, cid,
                    self.ffd_profiles[gid][cid],
                    self.simulator.df_dict[gid][cid][self.penalty[0]].iloc[-1]
                )
            )
            self.J[-1] += self.simulator.df_dict[gid][cid][self.QoI].iloc[-1]

        self.gen_ctr += 1
        # only the current generation's fitnesses are returned
        return self.J[-self.doe_size:]

    def apply_constraints(
            self, gid: int, cid: int, ffd_profile: np.ndarray, pen_value: float
    ) -> float:
        """
        **Returns** a penalty value based on some specific constraints</br>
        see https://inspyred.readthedocs.io/en/latest/recipes.html#constraint-selection
        """
        if not self.constraint:
            return 0.
        # the profile area must stay within +/- area_margin of the baseline area
        area_cond: bool = (
            abs(get_area(ffd_profile)) > (1. + self.area_margin) * self.baseline_area
            or abs(get_area(ffd_profile)) < (1. - self.area_margin) * self.baseline_area
        )
        # the penalized QoI (e.g. CL) must not drop below its threshold
        penalty_cond: bool = pen_value < self.penalty[-1]
        if area_cond or penalty_cond:
            logger.info(f"penalized candidate g{gid}, c{cid} "
                        f"with area {abs(get_area(ffd_profile))} and CL {pen_value}")
            return 1.
        return 0.

    def _observe(
            self,
            population: list[Individual],
            num_generations: int,
            num_evaluations: int,
            args: dict
    ):
        """
        **Plots** the n_plt best results each time a generation has been evaluated:</br>
        > the simulations residuals,</br>
        > the simulations CD & CL,</br>
        > the candidates fitness,</br>
        > the baseline and deformed profiles.

        Note:
            __num_generations__, __num_evaluations__ and __args__
            are inspyred mandatory arguments</br>
            see https://pythonhosted.org/inspyred/examples.html#custom-observer
        """
        gid = num_generations

        # extract generation best profiles
        # largest fitnesses when maximizing, smallest otherwise
        fitness: np.ndarray = np.array(self.J[-self.doe_size:])
        sorted_idx = (
            np.argsort(fitness)[-self.n_plt:] if self.maximize else np.argsort(fitness)[:self.n_plt]
        )

        # compute population statistics
        self.compute_statistics(np.array([ind.fitness for ind in population]))

        logger.info(f"extracting {self.n_plt} best profiles in g{gid}: {sorted_idx}..")
        logger.debug(f"g{gid} J-fitnesses (candidates): {fitness}")
        logger.debug(f"g{gid} P-fitness (population) {[ind.fitness for ind in population]}")

        # plot settings
        fig_name = f"inspyred_g{num_generations}.png"
        self.plot_generation(gid, sorted_idx, fitness, fig_name)

    def final_observe(self, *args, **kwargs):
        """
        **Plots** convergence progress by plotting the fitness values
        obtained with the successive generations</br>
        see https://pythonhosted.org/inspyred/reference.html#inspyred.ec.analysis.generation_plot
        """
        # gen_ctr was incremented past the last evaluated generation, hence -1
        fig_name = f"inspyred_optim_g{self.gen_ctr - 1}_c{self.doe_size}.png"
        self.plot_progress(self.gen_ctr - 1, fig_name, baseline_value=self.baseline_CD)
_evaluate(candidates: list[Individual], args: dict) -> list[float | list[float]]

Executes Wolf simulations, extracts results and returns the list of candidates QoIs.

Note

candidates and args are inspyred mandatory arguments
see https://pythonhosted.org/inspyred/tutorial.html#the-evaluator

Source code in aero_optim/optim/inspyred_optimizer.py
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
def _evaluate(self, candidates: list[Individual], args: dict) -> list[float | list[float]]:
    """
    **Executes** Wolf simulations, **extracts** results
    and **returns** the list of candidates QoIs.

    Note:
        __candidates__ and __args__ are inspyred mandatory arguments</br>
        see https://pythonhosted.org/inspyred/tutorial.html#the-evaluator
    """
    gid = self.gen_ctr

    # execute all candidates
    self.execute_candidates(candidates, gid)

    # add penalty to the candidates fitness
    for cid, _ in enumerate(candidates):
        # fitness = constraint penalty + quantity of interest (last iteration value)
        self.J.append(
            self.apply_constraints(
                gid, cid,
                self.ffd_profiles[gid][cid],
                self.simulator.df_dict[gid][cid][self.penalty[0]].iloc[-1]
            )
        )
        self.J[-1] += self.simulator.df_dict[gid][cid][self.QoI].iloc[-1]

    self.gen_ctr += 1
    # only the current generation's fitnesses are returned
    return self.J[-self.doe_size:]
_observe(population: list[Individual], num_generations: int, num_evaluations: int, args: dict)

Plots the n_plt best results each time a generation has been evaluated:

the simulations residuals,
the simulations CD & CL,
the candidates fitness,
the baseline and deformed profiles.

Note

num_generations, num_evaluations and args are inspyred mandatory arguments
see https://pythonhosted.org/inspyred/examples.html#custom-observer

Source code in aero_optim/optim/inspyred_optimizer.py
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
def _observe(
        self,
        population: list[Individual],
        num_generations: int,
        num_evaluations: int,
        args: dict
):
    """
    **Plots** the n_plt best results each time a generation has been evaluated:</br>
    > the simulations residuals,</br>
    > the simulations CD & CL,</br>
    > the candidates fitness,</br>
    > the baseline and deformed profiles.

    Note:
        __num_generations__, __num_evaluations__ and __args__
        are inspyred mandatory arguments</br>
        see https://pythonhosted.org/inspyred/examples.html#custom-observer
    """
    gid = num_generations

    # extract generation best profiles
    # largest fitnesses when maximizing, smallest otherwise
    fitness: np.ndarray = np.array(self.J[-self.doe_size:])
    sorted_idx = (
        np.argsort(fitness)[-self.n_plt:] if self.maximize else np.argsort(fitness)[:self.n_plt]
    )

    # compute population statistics
    self.compute_statistics(np.array([ind.fitness for ind in population]))

    logger.info(f"extracting {self.n_plt} best profiles in g{gid}: {sorted_idx}..")
    logger.debug(f"g{gid} J-fitnesses (candidates): {fitness}")
    logger.debug(f"g{gid} P-fitness (population) {[ind.fitness for ind in population]}")

    # plot settings
    fig_name = f"inspyred_g{num_generations}.png"
    self.plot_generation(gid, sorted_idx, fitness, fig_name)
apply_constraints(gid: int, cid: int, ffd_profile: np.ndarray, pen_value: float) -> float

Returns a penalty value based on some specific constraints
see https://inspyred.readthedocs.io/en/latest/recipes.html#constraint-selection

Source code in aero_optim/optim/inspyred_optimizer.py
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
def apply_constraints(
        self, gid: int, cid: int, ffd_profile: np.ndarray, pen_value: float
) -> float:
    """
    **Returns** a penalty value based on some specific constraints</br>
    see https://inspyred.readthedocs.io/en/latest/recipes.html#constraint-selection
    """
    if not self.constraint:
        return 0.
    # the profile area must stay within +/- area_margin of the baseline area
    area_cond: bool = (
        abs(get_area(ffd_profile)) > (1. + self.area_margin) * self.baseline_area
        or abs(get_area(ffd_profile)) < (1. - self.area_margin) * self.baseline_area
    )
    # the penalized QoI (e.g. CL) must not drop below its threshold
    penalty_cond: bool = pen_value < self.penalty[-1]
    if area_cond or penalty_cond:
        logger.info(f"penalized candidate g{gid}, c{cid} "
                    f"with area {abs(get_area(ffd_profile))} and CL {pen_value}")
        return 1.
    return 0.
final_observe(*args, **kwargs)

Plots convergence progress by plotting the fitness values obtained with the successive generations
see https://pythonhosted.org/inspyred/reference.html#inspyred.ec.analysis.generation_plot

Source code in aero_optim/optim/inspyred_optimizer.py
119
120
121
122
123
124
125
126
def final_observe(self, *args, **kwargs):
    """
    **Plots** convergence progress by plotting the fitness values
    obtained with the successive generations</br>
    see https://pythonhosted.org/inspyred/reference.html#inspyred.ec.analysis.generation_plot
    """
    # gen_ctr was incremented past the last evaluated generation, hence -1
    fig_name = f"inspyred_optim_g{self.gen_ctr - 1}_c{self.doe_size}.png"
    self.plot_progress(self.gen_ctr - 1, fig_name, baseline_value=self.baseline_CD)

optim.inspyred_optimizer.InspyredDebugOptimizer

Bases: DebugOptimizer

Source code in aero_optim/optim/inspyred_optimizer.py
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
class InspyredDebugOptimizer(DebugOptimizer):
    """
    Debug flavor of the inspyred optimizer relying on dummy simulations.
    """
    def _evaluate(self, candidates: list[Individual], args: dict) -> list[float | list[float]]:
        """
        **Executes** dummy simulations, **extracts** results
        and **returns** the list of candidates QoIs.
        """
        gid = self.gen_ctr
        self.execute_candidates(candidates, gid)

        # the fitness of each candidate is the last "result" entry of its dataframe
        self.J.extend(
            self.simulator.df_dict[gid][cid]["result"].iloc[-1]
            for cid in range(len(candidates))
        )

        self.gen_ctr += 1
        return self.J[-self.doe_size:]

    def _observe(
            self,
            population: list[Individual],
            num_generations: int,
            num_evaluations: int,
            args: dict
    ):
        """
        Dummy _observe function.
        """
        gid = num_generations
        fitness: np.ndarray = np.array(self.J[-self.doe_size:])
        # best candidates first via a stable ascending sort
        sorted_idx = np.argsort(fitness, kind="stable")[:self.n_plt]
        logger.info(f"extracting {self.n_plt} best profiles in g{gid}: {sorted_idx}..")
        logger.debug(f"g{gid} J-fitnesses (candidates): {fitness}")
        logger.debug(f"g{gid} P-fitness (population) {[ind.fitness for ind in population]}")
        self.compute_statistics(np.array([ind.fitness for ind in population]))

    def final_observe(self):
        """
        Dummy final_observe function.
        """
        fname = f"inspyred_optim_g{self.gen_ctr - 1}_c{self.doe_size}.png"
        self.plot_progress(self.gen_ctr - 1, fname)
_evaluate(candidates: list[Individual], args: dict) -> list[float | list[float]]

Executes dummy simulations, extracts results and returns the list of candidates QoIs.

Source code in aero_optim/optim/inspyred_optimizer.py
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
def _evaluate(self, candidates: list[Individual], args: dict) -> list[float | list[float]]:
    """
    **Executes** dummy simulations, **extracts** results
    and **returns** the list of candidates QoIs.
    """
    gid = self.gen_ctr

    # execute all candidates
    self.execute_candidates(candidates, gid)

    # each candidate's fitness is the last "result" entry of its dataframe
    for cid, _ in enumerate(candidates):
        self.J.append(self.simulator.df_dict[gid][cid]["result"].iloc[-1])

    self.gen_ctr += 1
    return self.J[-self.doe_size:]
_observe(population: list[Individual], num_generations: int, num_evaluations: int, args: dict)

Dummy _observe function.

Source code in aero_optim/optim/inspyred_optimizer.py
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
def _observe(
        self,
        population: list[Individual],
        num_generations: int,
        num_evaluations: int,
        args: dict
):
    """
    Dummy _observe function.

    Note:
        __num_generations__, __num_evaluations__ and __args__
        are inspyred mandatory arguments.
    """
    # extract best profiles
    gid = num_generations
    fitness: np.ndarray = np.array(self.J[-self.doe_size:])
    # stable ascending sort: lowest fitnesses (best candidates) first
    sorted_idx = np.argsort(fitness, kind="stable")[:self.n_plt]
    logger.info(f"extracting {self.n_plt} best profiles in g{gid}: {sorted_idx}..")
    logger.debug(f"g{gid} J-fitnesses (candidates): {fitness}")
    logger.debug(f"g{gid} P-fitness (population) {[ind.fitness for ind in population]}")

    # compute population statistics
    self.compute_statistics(np.array([ind.fitness for ind in population]))
final_observe()

Dummy final_observe function.

Source code in aero_optim/optim/inspyred_optimizer.py
167
168
169
170
171
172
def final_observe(self):
    """
    Dummy final_observe function.
    """
    # gen_ctr was incremented past the last evaluated generation, hence -1
    fig_name = f"inspyred_optim_g{self.gen_ctr - 1}_c{self.doe_size}.png"
    self.plot_progress(self.gen_ctr - 1, fig_name)

pymoo Optimizers

optim.pymoo_optimizer.PymooWolfOptimizer

Bases: WolfOptimizer, Problem

This class implements a Wolf based Optimizer.

Source code in aero_optim/optim/pymoo_optimizer.py
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
class PymooWolfOptimizer(WolfOptimizer, Problem):
    """
    This class implements a Wolf based Optimizer.
    """
    def __init__(self, config: dict):
        """
        Instantiates the WolfOptimizer object.

        **Input**

        - config (dict): the config file dictionary.
        """
        WolfOptimizer.__init__(self, config)
        # single-objective pymoo problem with 2 inequality constraints (area & CL)
        Problem.__init__(
            self, n_var=self.n_design, n_obj=1, n_ieq_constr=2, xl=self.bound[0], xu=self.bound[1]
        )

    def _evaluate(self, X: np.ndarray, out: np.ndarray, *args, **kwargs):
        """
        **Executes** Wolf simulations, **extracts** results
        and **returns** arrays of candidates QoIs and constraints.
        """
        gid = self.gen_ctr

        # execute all candidates
        self.execute_candidates(X, gid)

        # update candidates fitness with the last QoI entry of each result dataframe
        self.J.extend([
            self.simulator.df_dict[gid][cid][self.QoI].iloc[-1] for cid in range(len(X))
        ])

        out["F"] = np.array(self.J[-self.doe_size:])
        out["G"] = self.apply_constraints(gid)
        self._observe(out["F"])
        self.gen_ctr += 1

    def apply_constraints(self, gid: int) -> np.ndarray:
        """
        **Returns** a constraint array ensuring negative inequality</br>
        see https://pymoo.org/constraints/index.html
        """
        out = []
        if not self.constraint:
            # constraints disabled: every candidate trivially satisfies both inequalities
            return np.vstack([[-1, -1] for _ in range(len(self.ffd_profiles[gid]))])
        for cid, pro in enumerate(self.ffd_profiles[gid]):
            # relative area deviation beyond the allowed margin
            ieq_1 = (
                abs(abs(get_area(pro)) - self.baseline_area) / self.baseline_area - self.area_margin
            )
            # penalty QoI (e.g. CL) below its minimal admissible value
            ieq_2 = self.penalty[-1] - self.simulator.df_dict[gid][cid][self.penalty[0]].iloc[-1]
            if ieq_1 > 0 or ieq_2 > 0:
                logger.info(f"penalized candidate g{gid}, c{cid} "
                            f"with area {abs(get_area(pro))} "
                            f"and CL {self.simulator.df_dict[gid][cid][self.penalty[0]].iloc[-1]}")
            out.append([ieq_1, ieq_2])
        # fix: np.vstack replaces np.row_stack, which was deprecated and removed in NumPy 2.0
        return np.vstack(out)

    def _observe(self, pop_fitness: np.ndarray):
        """
        **Plots** the n_plt best results each time a generation has been evaluated:</br>
        > the simulations residuals,</br>
        > the simulations CD & CL,</br>
        > the candidates fitness,</br>
        > the baseline and deformed profiles.
        """
        gid = self.gen_ctr

        # extract generation best profiles (stable ascending sort, best first)
        sorted_idx = np.argsort(pop_fitness, kind="stable")[:self.n_plt]

        # compute population statistics
        self.compute_statistics(pop_fitness)

        logger.info(f"extracting {self.n_plt} best profiles in g{gid}: {sorted_idx}..")
        logger.debug(f"g{gid} J-fitnesses: {pop_fitness}")

        # plot settings
        fig_name = f"pymoo_g{gid}.png"
        self.plot_generation(gid, sorted_idx, pop_fitness, fig_name)

    def final_observe(self, *args, **kwargs):
        """
        **Plots** convergence progress by plotting the fitness values
        obtained with the successive generations
        """
        fig_name = f"pymoo_optim_g{self.gen_ctr}_c{self.doe_size}.png"
        self.plot_progress(self.gen_ctr, fig_name, baseline_value=self.baseline_CD)
__init__(config: dict)

Instantiates the WolfOptimizer object.

Input

  • config (dict): the config file dictionary.
Source code in aero_optim/optim/pymoo_optimizer.py
33
34
35
36
37
38
39
40
41
42
43
44
def __init__(self, config: dict):
    """
    Instantiates the WolfOptimizer object.

    **Input**

    - config (dict): the config file dictionary.
    """
    WolfOptimizer.__init__(self, config)
    # single-objective pymoo problem with 2 inequality constraints (area & CL)
    Problem.__init__(
        self, n_var=self.n_design, n_obj=1, n_ieq_constr=2, xl=self.bound[0], xu=self.bound[1]
    )
_evaluate(X: np.ndarray, out: np.ndarray, *args, **kwargs)

Executes Wolf simulations, extracts results and returns arrays of candidates QoIs and constraints.

Source code in aero_optim/optim/pymoo_optimizer.py
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
def _evaluate(self, X: np.ndarray, out: np.ndarray, *args, **kwargs):
    """
    **Executes** Wolf simulations, **extracts** results
    and **returns** arrays of candidates QoIs and constraints.
    """
    gid = self.gen_ctr

    # execute all candidates
    self.execute_candidates(X, gid)

    # update candidates fitness
    # fitness is the last QoI entry of each candidate's result dataframe
    self.J.extend([
        self.simulator.df_dict[gid][cid][self.QoI].iloc[-1] for cid in range(len(X))
    ])

    out["F"] = np.array(self.J[-self.doe_size:])
    out["G"] = self.apply_constraints(gid)
    self._observe(out["F"])
    self.gen_ctr += 1
_observe(pop_fitness: np.ndarray)

Plots the n_plt best results each time a generation has been evaluated:

the simulations residuals,
the simulations CD & CL,
the candidates fitness,
the baseline and deformed profiles.

Source code in aero_optim/optim/pymoo_optimizer.py
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
def _observe(self, pop_fitness: np.ndarray):
    """
    **Plots** the n_plt best results each time a generation has been evaluated:</br>
    > the simulations residuals,</br>
    > the simulations CD & CL,</br>
    > the candidates fitness,</br>
    > the baseline and deformed profiles.
    """
    gid = self.gen_ctr

    # extract generation best profiles
    # stable ascending sort: lowest fitnesses (best candidates) first
    sorted_idx = np.argsort(pop_fitness, kind="stable")[:self.n_plt]

    # compute population statistics
    self.compute_statistics(pop_fitness)

    logger.info(f"extracting {self.n_plt} best profiles in g{gid}: {sorted_idx}..")
    logger.debug(f"g{gid} J-fitnesses: {pop_fitness}")

    # plot settings
    fig_name = f"pymoo_g{gid}.png"
    self.plot_generation(gid, sorted_idx, pop_fitness, fig_name)
apply_constraints(gid: int) -> np.ndarray

Returns a constraint array ensuring negative inequality
see https://pymoo.org/constraints/index.html

Source code in aero_optim/optim/pymoo_optimizer.py
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
def apply_constraints(self, gid: int) -> np.ndarray:
    """
    **Returns** a constraint array ensuring negative inequality</br>
    see https://pymoo.org/constraints/index.html
    """
    out = []
    if not self.constraint:
        # constraints disabled: every candidate trivially satisfies both inequalities
        return np.vstack([[-1, -1] for _ in range(len(self.ffd_profiles[gid]))])
    for cid, pro in enumerate(self.ffd_profiles[gid]):
        # relative area deviation beyond the allowed margin
        ieq_1 = (
            abs(abs(get_area(pro)) - self.baseline_area) / self.baseline_area - self.area_margin
        )
        # penalty QoI (e.g. CL) below its minimal admissible value
        ieq_2 = self.penalty[-1] - self.simulator.df_dict[gid][cid][self.penalty[0]].iloc[-1]
        if ieq_1 > 0 or ieq_2 > 0:
            logger.info(f"penalized candidate g{gid}, c{cid} "
                        f"with area {abs(get_area(pro))} "
                        f"and CL {self.simulator.df_dict[gid][cid][self.penalty[0]].iloc[-1]}")
        out.append([ieq_1, ieq_2])
    # fix: np.vstack replaces np.row_stack, which was deprecated and removed in NumPy 2.0
    return np.vstack(out)
final_observe(*args, **kwargs)

Plots convergence progress by plotting the fitness values obtained with the successive generations

Source code in aero_optim/optim/pymoo_optimizer.py
109
110
111
112
113
114
115
def final_observe(self, *args, **kwargs):
    """
    **Plots** convergence progress by plotting the fitness values
    obtained with the successive generations
    """
    # note: uses gen_ctr directly (the inspyred counterpart uses gen_ctr - 1)
    fig_name = f"pymoo_optim_g{self.gen_ctr}_c{self.doe_size}.png"
    self.plot_progress(self.gen_ctr, fig_name, baseline_value=self.baseline_CD)

optim.pymoo_optimizer.PymooDebugOptimizer

Bases: DebugOptimizer, Problem

Source code in aero_optim/optim/pymoo_optimizer.py
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
class PymooDebugOptimizer(DebugOptimizer, Problem):
    """
    Debug flavor of the pymoo optimizer relying on dummy simulations.
    """
    def __init__(self, config: dict):
        """
        Dummy init.
        """
        # initialize the optimizer first, then declare the unconstrained pymoo problem
        DebugOptimizer.__init__(self, config)
        Problem.__init__(
            self, n_var=self.n_design, n_obj=1, n_ieq_constr=0, xl=self.bound[0], xu=self.bound[1]
        )

    def _evaluate(self, X: np.ndarray, out: np.ndarray, *args, **kwargs):
        """
        **Executes** dummy simulations, **extracts** results
        and **returns** the list of candidates QoIs.
        """
        gid = self.gen_ctr
        self.execute_candidates(X, gid)

        # the fitness of each candidate is the last "result" entry of its dataframe
        self.J.extend(
            self.simulator.df_dict[gid][cid]["result"].iloc[-1]
            for cid in range(len(X))
        )

        out["F"] = np.array(self.J[-self.doe_size:])
        self._observe(out["F"])
        self.gen_ctr += 1

    def _observe(self, pop_fitness: np.ndarray):
        """
        Dummy _observe function.
        """
        gid = self.gen_ctr
        # best candidates first via a stable ascending sort
        sorted_idx = np.argsort(pop_fitness, kind="stable")[:self.n_plt]
        logger.info(f"extracting {self.n_plt} best profiles in g{gid}: {sorted_idx}..")
        logger.debug(f"g{gid} J-fitnesses (candidates): {pop_fitness}")
        self.compute_statistics(pop_fitness)

    def final_observe(self):
        """
        Dummy final_observe function.
        """
        logger.info(f"plotting populations statistics after {self.gen_ctr} generations..")
        fname = f"pymoo_optim_g{self.gen_ctr}_c{self.doe_size}.png"
        self.plot_progress(self.gen_ctr, fname)
__init__(config: dict)

Dummy init.

Source code in aero_optim/optim/pymoo_optimizer.py
119
120
121
122
123
124
125
126
def __init__(self, config: dict):
    """
    Dummy init.
    """
    # initialize the optimizer first, then declare the unconstrained pymoo problem
    DebugOptimizer.__init__(self, config)
    Problem.__init__(
        self, n_var=self.n_design, n_obj=1, n_ieq_constr=0, xl=self.bound[0], xu=self.bound[1]
    )
_evaluate(X: np.ndarray, out: np.ndarray, *args, **kwargs)

Executes dummy simulations, extracts results and returns the list of candidates QoIs.

Source code in aero_optim/optim/pymoo_optimizer.py
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
def _evaluate(self, X: np.ndarray, out: np.ndarray, *args, **kwargs):
    """
    **Executes** dummy simulations, **extracts** results
    and **returns** the list of candidates QoIs.
    """
    gid = self.gen_ctr

    # execute all candidates
    self.execute_candidates(X, gid)

    # each candidate's fitness is the last "result" entry of its dataframe
    for cid, _ in enumerate(X):
        self.J.append(self.simulator.df_dict[gid][cid]["result"].iloc[-1])

    out["F"] = np.array(self.J[-self.doe_size:])
    self._observe(out["F"])
    self.gen_ctr += 1
_observe(pop_fitness: np.ndarray)

Dummy _observe function.

Source code in aero_optim/optim/pymoo_optimizer.py
145
146
147
148
149
150
151
152
153
154
155
156
def _observe(self, pop_fitness: np.ndarray):
    """
    Dummy _observe function.
    """
    # extract best profiles
    gid = self.gen_ctr
    # stable ascending sort: lowest fitnesses (best candidates) first
    sorted_idx = np.argsort(pop_fitness, kind="stable")[:self.n_plt]
    logger.info(f"extracting {self.n_plt} best profiles in g{gid}: {sorted_idx}..")
    logger.debug(f"g{gid} J-fitnesses (candidates): {pop_fitness}")

    # compute population statistics
    self.compute_statistics(pop_fitness)
final_observe()

Dummy final_observe function.

Source code in aero_optim/optim/pymoo_optimizer.py
158
159
160
161
162
163
164
def final_observe(self):
    """
    Dummy final_observe function.
    """
    logger.info(f"plotting populations statistics after {self.gen_ctr} generations..")
    # note: uses gen_ctr directly (the inspyred counterpart uses gen_ctr - 1)
    fig_name = f"pymoo_optim_g{self.gen_ctr}_c{self.doe_size}.png"
    self.plot_progress(self.gen_ctr, fig_name)

Generator class

optim.generator.Generator

This class defines a custom generator based on scipy.qmc samplers.

Source code in aero_optim/optim/generator.py
 8
 9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
class Generator:
    """
    This class defines a custom generator based on scipy.qmc samplers.
    """
    # some samplers available from scipy.qmc
    # see https://docs.scipy.org/doc/scipy/reference/stats.qmc.html
    sampler_list: list[str] = ["lhs", "halton", "sobol", "custom"]

    def __init__(self,
                 seed: int,
                 ndesign: int,
                 doe_size: int,
                 sampler_name: str,
                 bound: tuple[Any, ...],
                 custom_doe: str = ""):
        """
        Instantiates the Generator class with some optimization parameters and the sampler name.

        **Input**

        - seed (int): seed number of the sampler random number generator.
        - ndesign (int): the number of design variables (dimensions of the problem).
        - doe_size (int): the size of the initial and subsequent generations.
        - sampler_name (str): name of the sampling algorithm used to generate samples.
        - bound (tuple[Any, ...]): design variables boundaries.
        - custom_doe (str): path to the text file containing a custom doe.

        **Inner**

        - initial_doe (list[list[float]]): the initial generation sampled from the generator.
        """
        self.seed: int = seed
        self.ndesign: int = ndesign
        self.doe_size: int = doe_size
        # a custom doe file overrides the requested sampler ("custom" maps to no qmc engine)
        self.sampler: Optional[qmc.QMCEngine] = self.get_sampler(
            "custom" if custom_doe else sampler_name
        )
        self.bound: tuple[Any, ...] = bound
        self.initial_doe: list[list[float]] = self.sample_doe(custom_doe)

    def get_sampler(self, sampler_name: str) -> Optional[qmc.QMCEngine]:
        """
        **Returns** scipy qmc sampler, or None for the "custom" sampler.

        Raises:
            Exception: if sampler_name is not in sampler_list.
        """
        if sampler_name not in self.sampler_list:
            raise Exception(f"Unrecognized sampler {sampler_name}")
        else:
            return (
                qmc.LatinHypercube(d=self.ndesign, seed=self.seed) if sampler_name == "lhs"
                else qmc.Halton(d=self.ndesign, seed=self.seed) if sampler_name == "halton"
                else qmc.Sobol(d=self.ndesign, seed=self.seed) if sampler_name == "sobol"
                else None
            )

    def sample_doe(self, custom_doe: str) -> list[list[float]]:
        """
        **Returns** the initial doe: sampled in [0, 1) by the qmc sampler if any,
        otherwise parsed from the custom doe file
        (one whitespace-separated candidate per line).
        """
        if self.sampler:
            return self.sampler.random(n=self.doe_size).tolist()
        # fix: close the doe file after reading instead of leaking the file handle
        with open(custom_doe, "r") as doe_file:
            return [
                [float(xi) for xi in line.strip().split()]
                for line in doe_file.read().splitlines()
            ]

    def _ins_generator(self, random: Random, args: dict) -> list[float]:
        """
        **Returns** a single sample from the initial generation.

        Note:
            __random__ and __args__ are inspyred mandatory arguments</br>
            see https://pythonhosted.org/inspyred/tutorial.html#the-generator
        """
        # consume the doe in order; qmc samples are rescaled from [0, 1) to the design bounds
        element = self.initial_doe.pop(0)
        return qmc.scale([element], *self.bound).tolist()[0] if self.sampler else element

    def _pymoo_generator(self) -> np.ndarray:
        """
        **Returns** all samples from the initial generation.
        """
        return (
            qmc.scale(self.initial_doe, *self.bound) if self.sampler else np.array(self.initial_doe)
        )

__init__(seed: int, ndesign: int, doe_size: int, sampler_name: str, bound: tuple[Any, ...], custom_doe: str = '')

Instantiates the Generator class with some optimization parameters and the sampler name.

Input

  • seed (int): seed number of the sampler random number generator.
  • ndesign (int): the number of design variables (dimensions of the problem).
  • doe_size (int): the size of the initial and subsequent generations.
  • sampler_name (str): name of the sampling algorithm used to generate samples.
  • bound (tuple[Any, ...]): design variables boundaries.
  • custom_doe (str): path to the text file containing a custom doe.

Inner

  • initial_doe (list[list[float]]): the initial generation sampled from the generator.
Source code in aero_optim/optim/generator.py
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
def __init__(self,
             seed: int,
             ndesign: int,
             doe_size: int,
             sampler_name: str,
             bound: tuple[Any, ...],
             custom_doe: str = ""):
    """
    Instantiates the Generator class with some optimization parameters and the sampler name.

    **Input**

    - seed (int): seed number of the sampler random number generator.
    - ndesign (int): the number of design variables (dimensions of the problem).
    - doe_size (int): the size of the initial and subsequent generations.
    - sampler_name (str): name of the sampling algorithm used to generate samples.
    - bound (tuple[Any, ...]): design variables boundaries.
    - custom_doe (str): path to the text file containing a custom doe.

    **Inner**

    - initial_doe (list[list[float]]): the initial generation sampled from the generator.
    """
    self.seed: int = seed
    self.ndesign: int = ndesign
    self.doe_size: int = doe_size
    self.sampler: Optional[qmc.QMCEngine] = self.get_sampler(
        "custom" if custom_doe else sampler_name
    )
    self.bound: tuple[Any, ...] = bound
    self.initial_doe: list[list[float]] = self.sample_doe(custom_doe)

_ins_generator(random: Random, args: dict) -> list[float]

Returns a single sample from the initial generation.

Note

random and args are mandatory inspyred arguments;
see https://pythonhosted.org/inspyred/tutorial.html#the-generator

Source code in aero_optim/optim/generator.py
71
72
73
74
75
76
77
78
79
80
def _ins_generator(self, random: Random, args: dict) -> list[float]:
    """
    **Returns** a single sample from the initial generation.

    Note:
        __random__ and __args__ are inspyred mandatory arguments<br/>
        see https://pythonhosted.org/inspyred/tutorial.html#the-generator
    """
    element = self.initial_doe.pop(0)
    return qmc.scale([element], *self.bound).tolist()[0] if self.sampler else element

_pymoo_generator() -> np.ndarray

Returns all samples from the initial generation.

Source code in aero_optim/optim/generator.py
82
83
84
85
86
87
88
def _pymoo_generator(self) -> np.ndarray:
    """
    **Returns** all samples from the initial generation.
    """
    return (
        qmc.scale(self.initial_doe, *self.bound) if self.sampler else np.array(self.initial_doe)
    )

get_sampler(sampler_name: str) -> Optional[qmc.QMCEngine]

Returns scipy qmc sampler.

Source code in aero_optim/optim/generator.py
48
49
50
51
52
53
54
55
56
57
58
59
60
def get_sampler(self, sampler_name: str) -> Optional[qmc.QMCEngine]:
    """
    **Returns** scipy qmc sampler.
    """
    if sampler_name not in self.sampler_list:
        raise Exception(f"Unrecognized sampler {sampler_name}")
    else:
        return (
            qmc.LatinHypercube(d=self.ndesign, seed=self.seed) if sampler_name == "lhs"
            else qmc.Halton(d=self.ndesign, seed=self.seed) if sampler_name == "halton"
            else qmc.Sobol(d=self.ndesign, seed=self.seed) if sampler_name == "sobol"
            else None
        )