Coverage for moptipy/algorithms/so/ffa/eafea_c.py: 95% (63 statements)


1""" 

2A Hybrid EA-FEA Algorithm: the `EAFEA-C`. 

3 

4The algorithm has two branches: (1) the EA branch, which performs randomized 

5local search (RLS), which is in some contexts also called (1+1) EA. (2) the 

6FEA branch, which performs RLS but uses frequency fitness assignment (FFA) 

7as optimization criterion. This hybrid algorithm has the following features: 

8 

9- The new solution of the FEA strand is copied to the EA strand if it has an 

10 H-value which is not worse than the H-value of the current solution. 

11- The new solution of the EA strand is copied over to the FEA strand if it is 

12 better than the current solution of the EA strand. 

13- The H-table is updated by both strands. 

14- The FEA strand always toggles back to the EA strand. 

15- The EA strand toggles to the FEA strand if it did not improve for a time 

16 limit that is incremented by one whenever a toggle was made. 

17""" 

from collections import Counter
from typing import Callable, Final

from numpy.random import Generator
from pycommons.types import type_error

from moptipy.algorithms.so.ffa.ffa_h import create_h, log_h
from moptipy.api.algorithm import Algorithm1
from moptipy.api.operators import Op0, Op1
from moptipy.api.process import Process


class EAFEAC(Algorithm1):
    """An implementation of the EAFEA-C."""

    def __init__(self, op0: Op0, op1: Op1, log_h_tbl: bool = False) -> None:
        """
        Create the EAFEA-C.

        :param op0: the nullary search operator
        :param op1: the unary search operator
        :param log_h_tbl: should we log the H table?
        """
        super().__init__("eafeaC", op0, op1)
        if not isinstance(log_h_tbl, bool):
            raise type_error(log_h_tbl, "log_h_tbl", bool)
        #: True if we should log the H table, False otherwise.
        self.__log_h_tbl: Final[bool] = log_h_tbl

    def solve(self, process: Process) -> None:
        """
        Apply the EAFEA-C to an optimization problem.

        :param process: the black-box process object
        """
        # Create records for the old and new points in the search space.
        x_ea = process.create()  # record for current solution of the EA
        x_fea = process.create()  # record for current solution of the FEA
        x_new = process.create()  # record for the new solution

        # Obtain the random number generator.
        random: Final[Generator] = process.get_random()

        # Put function references in variables to save time.
        evaluate: Final[Callable] = process.evaluate  # the objective
        should_terminate: Final[Callable] = process.should_terminate
        xcopy: Final[Callable] = process.copy  # copy(dest, source)
        op0: Final[Callable] = self.op0.op0  # the nullary operator
        op1: Final[Callable] = self.op1.op1  # the unary operator

        h, ofs = create_h(process)  # Allocate the H-table.

        # Start at a random point in the search space and evaluate it.
        op0(random, x_ea)  # Create one solution randomly and
        y_ea: int | float = evaluate(x_ea) + ofs  # evaluate it.
        xcopy(x_fea, x_ea)  # FEA and EA start with the same solution.
        y_fea: int | float = y_ea

        ea_max_no_lt_moves: int = 1  # maximum non-improving moves for EA
        ea_no_lt_moves: int = 0  # current number of non-improving moves
        use_ffa: bool = False  # We start with the EA branch.

        while not should_terminate():  # Until we need to quit...
            # Sample and evaluate a new solution.
            op1(random, x_new, x_fea if use_ffa else x_ea)
            y_new: int | float = evaluate(x_new) + ofs
            h[y_new] += 1  # type: ignore  # Always update H.

            if use_ffa:  # The FEA branch uses FFA.
                use_ffa = False  # Always toggle back from FFA to EA.

                h[y_fea] += 1  # type: ignore  # Update H for FEA solution.
                if h[y_new] <= h[y_fea]:  # type: ignore  # FFA acceptance.
                    xcopy(x_ea, x_new)  # Copy the solution also to the EA.
                    x_fea, x_new = x_new, x_fea
                    y_fea = y_ea = y_new

            else:  # The EA (RLS) branch performs plain local search.
                h[y_ea] += 1  # type: ignore  # Update H in *both* branches.

                if y_new <= y_ea:  # The acceptance criterion of RLS / EA.
                    if y_new < y_ea:  # Did we make an actual improvement?
                        ea_no_lt_moves = 0  # Reset non-improving counter.
                        xcopy(x_fea, x_new)  # Copy solution over to FEA.
                        y_fea = y_new  # And store its objective value.
                    else:  # The move was *not* an improvement:
                        ea_no_lt_moves += 1  # so increase the counter.
                    x_ea, x_new = x_new, x_ea  # Accept the new solution.
                    y_ea = y_new  # Store its objective value.
                else:  # The move was worse than the current solution.
                    ea_no_lt_moves += 1  # Increase non-improving counter.

                if ea_no_lt_moves >= ea_max_no_lt_moves:  # Toggle EA -> FEA.
                    ea_no_lt_moves = 0  # Reset non-improving move counter.
                    ea_max_no_lt_moves += 1  # Increment the limit by one.
                    use_ffa = True  # Toggle to FFA.

        if not self.__log_h_tbl:
            return  # We are done here.

        # After we are done, we want to log the H-table.
        if h[y_ea] == 0:  # type: ignore  # Fix the H-table for the case that
            h = Counter()  # only one FE was performed: in this case, make a
            h[y_ea] = 1  # Counter with only a single 1 value inside.

        log_h(process, h, ofs)  # Log the H-table.
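

# A minimal usage sketch, assuming moptipy's standard `Execution` API and its
# bit-string tooling: the `BitStrings` space, the `OneMax` example objective,
# and the operators `Op0Random` and `Op1Flip1`.  These names are assumptions;
# if they differ in your moptipy version, substitute the corresponding space,
# objective, and operators.
if __name__ == "__main__":
    # assumed imports, see the note above
    from moptipy.api.execution import Execution
    from moptipy.examples.bitstrings.onemax import OneMax
    from moptipy.operators.bitstrings.op0_random import Op0Random
    from moptipy.operators.bitstrings.op1_flip1 import Op1Flip1
    from moptipy.spaces.bitstrings import BitStrings

    n = 32  # number of bits of the toy OneMax instance
    execution = (Execution()
                 .set_solution_space(BitStrings(n))  # bit strings, length n
                 .set_objective(OneMax(n))  # minimize the number of 0 bits
                 .set_algorithm(EAFEAC(Op0Random(), Op1Flip1()))
                 .set_max_fes(10_000))  # stop after 10000 evaluations
    with execution.execute() as process:  # run the EAFEA-C
        print(f"best OneMax objective value found: {process.get_best_f()}")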