Coverage for moptipy / mock / end_results.py: 92%

142 statements  

coverage.py v7.12.0, created at 2025-11-24 08:49 +0000

1"""Compute the end result of one mock run.""" 

2 

3from dataclasses import dataclass 

4from math import ceil 

5from typing import Final 

6 

7from numpy.random import Generator 

8from pycommons.io.console import logger 

9from pycommons.types import check_int_range, type_error 

10 

11from moptipy.evaluation.end_results import EndResult 

12from moptipy.mock.components import ( 

13 Algorithm, 

14 BasePerformance, 

15 Experiment, 

16 Instance, 

17) 

18from moptipy.utils.nputils import rand_generator 

19 

20 

21def end_result(performance: BasePerformance, seed: int, 

22 max_fes: int | None = None, 

23 max_time_millis: int | None = None) -> EndResult: 

24 """ 

25 Compute the end result of a mock run. 

26 

27 :param performance: the performance record 

28 :param seed: the random seed 

29 :param max_fes: the maximum number of FEs 

30 :param max_time_millis: the maximum time 

31 :returns: the end result record 

32 """ 

    if not isinstance(performance, BasePerformance):
        raise type_error(performance, "performance", BasePerformance)

    limit_time: int = 1_000_000_000_000
    limit_fes: int = 1_000_000_000_000
    if max_time_millis is not None:
        limit_time = check_int_range(
            max_time_millis, "max_time_millis", 11, 1_000_000_000_000)
    if max_fes is not None:
        limit_fes = check_int_range(
            max_fes, "max_fes", 11, 1_000_000_000_000)

    # The random number generator is determined by the seed.
    random: Final[Generator] = rand_generator(seed)

    # The speed also has some slight jitter.
    jitter: Final[float] = performance.jitter
    speed: float = -1
    while (speed <= 0) or (speed >= 1):
        speed = random.normal(loc=performance.speed, scale=0.01 * jitter)

    # total_time ~ total_fes * (performance.speed ** 3)
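    # If a time limit is given, the total runtime is fixed slightly above
    # that limit and the number of consumed FEs is sampled from it via the
    # speed; otherwise, the FE budget is fixed (max_fes, or 1_000_000 if no
    # FE limit was given) and the total runtime is sampled from that budget.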

    total_time: int
    total_fes: int
    trials: int

    if max_time_millis is not None:
        total_time = int(max_time_millis + abs(random.normal(
            loc=0, scale=5 * jitter)))
        total_fes = -1
        trials = 0
        while ((total_fes <= 100) or (total_fes > limit_fes)) \
                and (trials < 10000):
            trials += 1
            total_fes = int(random.normal(
                loc=max(10.0, total_time / (speed ** 3)),
                scale=max(100.0, 200.0 / speed)))
        if trials >= 10000:
            total_fes = int(min(limit_fes, 10000.0))
    else:
        total_fes = max_fes if max_fes is not None else 1_000_000
        total_time = -1
        trials = 0
        while ((total_time <= 10) or (total_time > limit_time)) \
                and (trials < 10000):
            trials += 1
            total_time = int(random.normal(
                loc=max(10.0, total_fes * (speed ** 3)),
                scale=max(10.0, 100.0 / speed)))
        if trials >= 10000:
            total_time = int(min(limit_time, 10000))

    # We now look for the vicinity of the local optimum that will be found.
    # We use the quality to determine which attractor to use.
    # Then we will sample a solution between the next lower and next higher
    # attractor, again using the jitter and quality.

    # First, add some jitter to the quality.
    qual: float = -1
    while (qual <= 0) or (qual >= 1):
        qual = random.normal(loc=performance.performance, scale=0.02 * jitter)

    # Second, find the right attractor and remember it in base.
    att: Final[tuple[int, ...]] = performance.instance.attractors
    attn: Final[int] = len(att)
    att_index: int = -1
    best: Final[int] = performance.instance.best
    worst: Final[int] = performance.instance.worst
    att_trials: int = 1000
    while (att_index < 0) or (att_index >= (attn - 1)):
        att_trials -= 1
        if att_trials <= 0:
            att_index = attn // 2
            break
        att_index = int(random.normal(loc=attn * (qual ** 1.7),
                                      scale=jitter ** 0.9))
    base: Final[int] = att[att_index]

    # Third, choose the ends of the intervals in which we can jitter.
    jit_end: int = min(int(base + 0.6 * (att[att_index + 1] - base)), worst)
    jit_start: int = base
    if att_index > 0:
        jit_start = int(0.5 + ceil(base - 0.6 * (base - att[att_index - 1])))
    jit_start = max(jit_start, best)

    # Now determine best_f.
    best_f: int = -1
    while (best_f < jit_start) or (best_f > jit_end) \
            or (best_f < best) or (best_f > worst):
        uni: float = -1
        while (uni <= 0) or (uni >= 1):
            uni = abs(random.normal(loc=0, scale=jitter))
        best_f = round(base - uni * (base - jit_start)) \
            if random.uniform(low=0, high=1) < qual else \
            round(base + uni * (jit_end - base))

    # Finally, we determine when the last improvement happened.
    fact: float = -1
    while (fact <= 0) or (fact >= 1):
        fact = 1 - random.exponential(scale=(att_index + 1) / (attn + 1))
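    # `fact` is the fraction of the consumed budget at which the last
    # improvement happened: the last improvement FE and the last improvement
    # time below are both sampled around this fraction of their totals.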

    last_improvement_fe: int = -1
    while (last_improvement_fe <= 0) or (last_improvement_fe >= total_fes):
        last_improvement_fe = int(random.normal(
            loc=total_fes * fact, scale=total_fes * 0.05 * jitter))

    last_improvement_time: int = -1
    while (last_improvement_time <= 0) \
            or (last_improvement_time >= total_time):
        last_improvement_time = int(random.normal(
            loc=total_time * fact, scale=total_time * 0.05 * jitter))

    return EndResult(
        algorithm=performance.algorithm.name,
        instance=performance.instance.name,
        objective="f", encoding="e",
        rand_seed=seed,
        best_f=best_f,
        last_improvement_fe=last_improvement_fe,
        last_improvement_time_millis=last_improvement_time,
        total_fes=total_fes,
        total_time_millis=total_time,
        goal_f=performance.instance.best,
        max_fes=max_fes,
        max_time_millis=max_time_millis)

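# Illustrative usage sketch (not part of the module): assuming `exp` is an
# already constructed moptipy.mock.components.Experiment, one mock end result
# per (application, seed) pair could be produced like this:
#
#     for per in exp.applications:
#         for seed in exp.seeds_for_instance(per.instance):
#             er = end_result(per, seed, max_fes=100_000)
#             print(er.algorithm, er.instance, er.best_f)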

@dataclass(frozen=True, init=False, order=True)
class EndResults:
    """An immutable set of end results."""

    #: The experiment.
    experiment: Experiment
    #: The end results.
    results: tuple[EndResult, ...]
    #: The maximum permitted FEs.
    max_fes: int | None
    #: The maximum permitted milliseconds.
    max_time_millis: int | None
    #: The results per algorithm.
    __results_for_algo: dict[str | Algorithm, tuple[EndResult, ...]]
    #: The results per instance.
    __results_for_inst: dict[str | Instance, tuple[EndResult, ...]]

    def __init__(self,
                 experiment: Experiment,
                 results: tuple[EndResult, ...],
                 max_fes: int | None = None,
                 max_time_millis: int | None = None):
        """
        Create the mock results of an experiment.

        :param experiment: the experiment
        :param results: the end results
        :param max_fes: the maximum permitted FEs
        :param max_time_millis: the maximum permitted milliseconds
        """

        if not isinstance(experiment, Experiment):
            raise type_error(experiment, "experiment", Experiment)
        object.__setattr__(self, "experiment", experiment)

        per_algo: Final[dict[str | Algorithm, list[EndResult]]] = {}
        per_inst: Final[dict[str | Instance, list[EndResult]]] = {}
        if not isinstance(results, tuple):
            raise type_error(results, "results", tuple)
        if len(results) <= 0:
            raise ValueError("results must not be empty.")

        for a in results:
            if not isinstance(a, EndResult):
                raise type_error(a, "element of results", EndResult)
            aa = experiment.get_algorithm(a.algorithm)
            if aa in per_algo:
                per_algo[aa].append(a)
            else:
                per_algo[aa] = [a]
            ii = experiment.get_instance(a.instance)
            if ii in per_inst:
                per_inst[ii].append(a)
            else:
                per_inst[ii] = [a]

        object.__setattr__(self, "results", results)

        pa: dict[str | Algorithm, tuple[EndResult, ...]] = {}
        for ax in experiment.algorithms:
            lax: list[EndResult] = per_algo[ax]
            lax.sort()
            pa[ax.name] = pa[ax] = tuple(lax)
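            # Each tuple is stored under both the component object and its
            # name (likewise for the instances below), so the lookup methods
            # accept either form of key.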

        pi: dict[str | Instance, tuple[EndResult, ...]] = {}
        for ix in experiment.instances:
            lix: list[EndResult] = per_inst[ix]
            lix.sort()
            pi[ix.name] = pi[ix] = tuple(lix)

        object.__setattr__(self, "_EndResults__results_for_algo", pa)
        object.__setattr__(self, "_EndResults__results_for_inst", pi)
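        # EndResults is a frozen dataclass, so attributes cannot be assigned
        # directly; object.__setattr__ is used instead, with the
        # double-underscore fields addressed via their name-mangled names.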

        if max_fes is not None:
            check_int_range(max_fes, "max_fes", 1, 1_000_000_000_000)
        object.__setattr__(self, "max_fes", max_fes)

        if max_time_millis is not None:
            check_int_range(
                max_time_millis, "max_time_millis", 1, 1_000_000_000_000_000)
        object.__setattr__(self, "max_time_millis", max_time_millis)

    @staticmethod
    def create(experiment: Experiment,
               max_fes: int | None = None,
               max_time_millis: int | None = None) -> "EndResults":
        """
        Create the end results for a given experiment.

        :param experiment: the experiment
        :param max_fes: the maximum number of FEs
        :param max_time_millis: the maximum time
        :returns: the end results
        """

        if not isinstance(experiment, Experiment):
            raise type_error(experiment, "experiment", Experiment)
        logger(
            "now creating all end results for an experiment with "
            f"{len(experiment.algorithms)} algorithms, "
            f"{len(experiment.instances)} instances, and "
            f"{len(experiment.per_instance_seeds[0])} runs per setup.")

        if max_fes is not None:
            check_int_range(max_fes, "max_fes", 1, 1_000_000_000_000)

        if max_time_millis is not None:
            check_int_range(
                max_time_millis, "max_time_millis", 1, 1_000_000_000_000_000)
        results: list[EndResult] = [
            end_result(performance=per,
                       seed=seed,
                       max_fes=max_fes,
                       max_time_millis=max_time_millis)
            for per in experiment.applications
            for seed in experiment.seeds_for_instance(per.instance)]
        results.sort()

        res: Final[EndResults] = EndResults(experiment=experiment,
                                            results=tuple(results),
                                            max_fes=max_fes,
                                            max_time_millis=max_time_millis)
        logger(f"finished creating all {len(res.results)} end results.")
        return res

    def results_for_algorithm(self, algorithm: str | Algorithm) \
            -> tuple[EndResult, ...]:
        """
        Get the end results of one specific algorithm.

        :param algorithm: the algorithm, or its name
        :returns: the end results
        """
        return self.__results_for_algo[algorithm]

    def results_for_instance(self, instance: str | Instance) \
            -> tuple[EndResult, ...]:
        """
        Get the end results on one specific instance.

        :param instance: the instance, or its name
        :returns: the end results
        """
        return self.__results_for_inst[instance]
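# Illustrative usage sketch (not part of the module): EndResults.create
# builds one mock result for each application of an algorithm to an instance
# and each corresponding seed, and the lookup methods accept either the
# component objects or their names. Assuming `exp` is an already constructed
# moptipy.mock.components.Experiment:
#
#     res = EndResults.create(exp, max_fes=100_000)
#     for algo in exp.algorithms:
#         runs = res.results_for_algorithm(algo)
#         print(algo.name, min(r.best_f for r in runs))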