Coverage for moptipy / algorithms / so / ffa / eafea_d.py: 99%
68 statements
« prev ^ index » next coverage.py v7.12.0, created at 2025-11-24 08:49 +0000
« prev ^ index » next coverage.py v7.12.0, created at 2025-11-24 08:49 +0000
1"""
2A Hybrid EA-FEA Algorithm: the `EAFEA-D`.
4The algorithm has two branches: (1) the EA branch, which performs randomized
5local search (RLS), which is in some contexts also called (1+1) EA. (2) the
6FEA branch, which performs RLS but uses frequency fitness assignment (FFA)
7as optimization criterion. This hybrid algorithm has the following features:
9- The new solution of the FEA strand is copied to the EA strand if it is
10 accepted by the FEA strand, i.e., if its H-value is less than or equal to that of the
11 current solution of that strand.
12- The new solution of the EA strand is copied over to the FEA strand if it is
13 better than the current solution of the EA strand.
14- The H-table is updated by both strands.
15- The FEA strand always toggles back to the EA strand.
16- The EA strand toggles to the FEA strand if it did not strictly improve for a
17 certain number of FEs >= log2 of the total consumed FEs.
18"""
19from collections import Counter
20from math import log2
21from typing import Callable, Final
23from numpy.random import Generator
24from pycommons.types import type_error
26from moptipy.algorithms.so.ffa.ffa_h import create_h, log_h
27from moptipy.api.algorithm import Algorithm1
28from moptipy.api.operators import Op0, Op1
29from moptipy.api.process import Process
class EAFEAD(Algorithm1):
    """An implementation of the EAFEA-D."""

    def __init__(self, op0: Op0, op1: Op1, log_h_tbl: bool = False) -> None:
        """
        Create the EAFEA-D.

        :param op0: the nullary search operator
        :param op1: the unary search operator
        :param log_h_tbl: should we log the H table?
        """
        super().__init__("eafeaD", op0, op1)
        if not isinstance(log_h_tbl, bool):
            raise type_error(log_h_tbl, "log_h_tbl", bool)
        #: True if we should log the H table, False otherwise
        self.log_h_tbl: Final[bool] = log_h_tbl

    def solve(self, process: Process) -> None:
        """
        Apply the EAFEA-D to an optimization problem.

        :param process: the black-box process object
        """
        # Allocate one record per strand plus one scratch record for the
        # newly sampled candidate solution.
        cur_ea = process.create()  # current solution of the EA strand
        cur_fea = process.create()  # current solution of the FEA strand
        candidate = process.create()  # record for the new solution

        # The random number generator driving both operators.
        rng: Final[Generator] = process.get_random()

        # Bind frequently used bound methods to locals for speed.
        evaluate: Final[Callable] = process.evaluate  # the objective
        should_terminate: Final[Callable] = process.should_terminate
        xcopy: Final[Callable] = process.copy  # copy(dest, source)
        op0: Final[Callable] = self.op0.op0  # the nullary operator
        op1: Final[Callable] = self.op1.op1  # the unary operator

        h, ofs = create_h(process)  # allocate the shared H-table

        # Both strands begin from one shared random initial solution.
        op0(rng, cur_ea)  # sample the starting point ...
        f_ea: int | float = evaluate(cur_ea) + ofs  # ... and evaluate it
        xcopy(cur_fea, cur_ea)  # the FEA strand starts from the same point
        f_fea: int | float = f_ea

        stagnation: int = 0  # consecutive EA moves without strict improvement
        total_fes: int = 1  # objective function evaluations consumed so far
        in_fea_branch: bool = False  # we begin in the EA branch
        switch_to_fea: bool = False  # pending toggle from EA to FEA

        while not should_terminate():  # main loop until budget exhausted
            # Derive one candidate from the active strand and evaluate it.
            parent = cur_fea if in_fea_branch else cur_ea
            op1(rng, candidate, parent)
            f_new: int | float = evaluate(candidate) + ofs
            total_fes += 1  # count this FE
            h[f_new] += 1  # type: ignore # the H-table sees every sample

            if in_fea_branch:  # ---- FEA branch: FFA-driven RLS ----
                in_fea_branch = False  # FEA always hands control back to EA
                switch_to_fea = False  # and clears any pending toggle

                h[f_fea] += 1  # type: ignore # bump H of the FEA incumbent
                if h[f_new] <= h[f_fea]:  # type: ignore # FFA acceptance
                    xcopy(cur_ea, candidate)  # adopt in the EA strand too
                    cur_fea, candidate = candidate, cur_fea
                    f_ea = f_new
                    f_fea = f_new
            else:  # ---- EA branch: plain RLS / (1+1) EA ----
                h[f_ea] += 1  # type: ignore # H is updated in both branches

                if f_new > f_ea:  # worse: reject the candidate
                    stagnation += 1  # one more non-improving move
                    switch_to_fea = stagnation >= log2(total_fes)
                else:  # accepted under the <= criterion of RLS
                    if f_new == f_ea:  # accepted, but not strictly better
                        stagnation += 1
                        switch_to_fea = stagnation >= log2(total_fes)
                    else:  # strict improvement
                        stagnation = 0  # reset the stagnation counter
                        xcopy(cur_fea, candidate)  # share with the FEA strand
                        f_fea = f_new  # remember its objective value
                    cur_ea, candidate = candidate, cur_ea  # accept
                    f_ea = f_new

                if switch_to_fea:  # stagnated long enough: go to FEA
                    stagnation = 0
                    in_fea_branch = True

        if not self.log_h_tbl:
            return  # nothing more to do

        # Log the H-table. If the incumbent's H entry is still zero, only a
        # single FE happened: substitute a one-entry table for consistency.
        if h[f_ea] == 0:  # type: ignore
            h = Counter({f_ea: 1})
        log_h(process, h, ofs)