Coverage for moptipy/algorithms/so/ffa/eafea_n.py: 98%

66 statements  


1""" 

2A Hybrid EA-FEA Algorithm: the `EAFEA-N`. 

3 

4The algorithm has two branches: (1) the EA branch, which performs randomized 

5local search (RLS), which is in some contexts also called (1+1) EA. (2) the 

6FEA branch, which performs RLS but uses frequency fitness assignment (FFA) 

7as optimization criterion. This hybrid algorithm has the following features: 

8 

9- The new solution of the FEA strand is always copied to the EA strand. 

10- The new solution of the EA strand is copied over to the FEA strand if it is 

11 better than the current solution of the FEA strand. 

12- The H-table is updated by both strands. 

13- The FEA strand always toggles back to the EA strand. 

14- The EA strand toggles to the FEA strand if it did not improve for a certain, 

15 increasing `time limit`. 

16- Every time the EA strand toggles over to the FEA strand, `time_limit` is 

17 incremented by 1. 

18""" 

from collections import Counter
from typing import Callable, Final

from numpy.random import Generator
from pycommons.types import type_error

from moptipy.algorithms.so.ffa.ffa_h import create_h, log_h
from moptipy.api.algorithm import Algorithm1
from moptipy.api.operators import Op0, Op1
from moptipy.api.process import Process


class EAFEAN(Algorithm1):
    """An implementation of the EAFEA-N."""

    def __init__(self, op0: Op0, op1: Op1, log_h_tbl: bool = False) -> None:
        """
        Create the EAFEA-N.

        :param op0: the nullary search operator
        :param op1: the unary search operator
        :param log_h_tbl: should we log the H table?
        """
        super().__init__("eafeaN", op0, op1)
        if not isinstance(log_h_tbl, bool):
            raise type_error(log_h_tbl, "log_h_tbl", bool)
        #: True if we should log the H table, False otherwise
        self.__log_h_tbl: Final[bool] = log_h_tbl

    def solve(self, process: Process) -> None:
        """
        Apply the EAFEA-N to an optimization problem.

        :param process: the black-box process object
        """
        # Create records for old and new point in the search space.
        x_ea = process.create()  # record for current solution of the EA
        x_fea = process.create()  # record for current solution of the FEA
        x_new = process.create()  # record for new solution

        # Obtain the random number generator.
        random: Final[Generator] = process.get_random()

        # Put function references in variables to save time.
        evaluate: Final[Callable] = process.evaluate  # the objective
        should_terminate: Final[Callable] = process.should_terminate
        xcopy: Final[Callable] = process.copy  # copy(dest, source)
        op0: Final[Callable] = self.op0.op0  # the nullary operator
        op1: Final[Callable] = self.op1.op1  # the unary operator

        h, ofs = create_h(process)  # Allocate the h-table.

        # Start at a random point in the search space and evaluate it.
        op0(random, x_ea)  # Create 1 solution randomly and
        y_ea: int | float = evaluate(x_ea) + ofs  # evaluate it.
        xcopy(x_fea, x_ea)  # FEA and EA start with the same initial solution.
        y_fea: int | float = y_ea

        ea_max_no_lt_moves: int = 1  # maximum no-improvement moves for EA
        ea_no_lt_moves: int = 0  # current no-improvement moves
        use_ffa: bool = False  # We start with the EA branch.

        while not should_terminate():  # Until we need to quit...
            # Sample and evaluate a new solution.
            op1(random, x_new, x_fea if use_ffa else x_ea)
            y_new: int | float = evaluate(x_new) + ofs
            h[y_new] += 1  # type: ignore  # Always update H.

            if use_ffa:  # The FEA branch uses FFA.
                use_ffa = False  # Always toggle use from FFA to EA.
                ea_no_lt_moves = 0  # Reset the EA no-improv move counter.

                h[y_fea] += 1  # type: ignore  # Update H for FEA solution.
                if h[y_new] <= h[y_fea]:  # type: ignore  # FEA acceptance.
                    xcopy(x_ea, x_new)  # Copy solution also to EA.
                    x_fea, x_new = x_new, x_fea  # New solution becomes FEA solution.
                    y_fea = y_new
                else:  # FEA does not accept, but we always copy to the EA,
                    x_ea, x_new = x_new, x_ea  # so we quickly swap here.
                y_ea = y_new  # We always copy the solution over to the EA.

            else:  # EA or RLS branch performs local search.
                h[y_ea] += 1  # type: ignore  # Update H in *both* branches.

                if y_new < y_fea:  # Is new solution better than the FEA one?
                    xcopy(x_fea, x_new)  # Copy solution over to FEA.
                    y_fea = y_new  # And store the objective value.

                if y_new <= y_ea:  # The acceptance criterion of RLS / EA.
                    x_ea, x_new = x_new, x_ea  # Accept new solution.
                    y_new, y_ea = y_ea, y_new  # Swap values (for line below).
                    if y_new > y_ea:  # Check if we did an actual improvement.
                        ea_no_lt_moves = 0  # non-improving moves counter = 0.
                        continue  # We can jump to the next iteration.

                ea_no_lt_moves += 1  # Increase non-improved counter.
                if ea_no_lt_moves >= ea_max_no_lt_moves:  # Toggle: EA to FEA.
                    ea_max_no_lt_moves += 1  # Increment limit by one.
                    use_ffa = True  # Toggle to FFA.

        if not self.__log_h_tbl:
            return  # we are done here

        # After we are done, we want to print the H-table.
        if h[y_ea] == 0:  # type: ignore  # Fix the H-table for the case
            h = Counter()  # that only one FE was performed: In this case,
            h[y_ea] = 1  # make Counter with only a single 1 value inside.

        log_h(process, h, ofs)  # log the H-table
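
For context, a minimal usage sketch follows, showing how the EAFEAN class listed above could be plugged into moptipy's black-box Execution API to solve OneMax over bit strings. The module paths for the bit-string space, the Op0Random and Op1Flip1 operators, and the OneMax objective follow the moptipy documentation but are assumptions of this sketch and may differ between versions.

# A minimal usage sketch (assumed module paths, see note above): run the
# EAFEA-N on a 32-bit OneMax instance for at most 10_000 objective function
# evaluations and print the best objective value found.
from moptipy.algorithms.so.ffa.eafea_n import EAFEAN
from moptipy.api.execution import Execution
from moptipy.examples.bitstrings.onemax import OneMax
from moptipy.operators.bitstrings.op0_random import Op0Random
from moptipy.operators.bitstrings.op1_flip1 import Op1Flip1
from moptipy.spaces.bitstrings import BitStrings

problem = OneMax(32)           # objective: minimize the number of 0 bits
space = BitStrings(problem.n)  # solution space: bit strings of length 32
algorithm = EAFEAN(Op0Random(), Op1Flip1())  # EAFEA-N with single-bit flips

with Execution().set_solution_space(space) \
        .set_objective(problem) \
        .set_algorithm(algorithm) \
        .set_rand_seed(1) \
        .set_max_fes(10_000) \
        .execute() as process:
    print(f"best objective value found: {process.get_best_f()}")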