Coverage for moptipy/algorithms/so/vector/scipy.py: 98% (87 statements)

1""" 

2A set of numerical optimization algorithms from SciPy. 

3 

4The function :func:`scipy.optimize.minimize` provides a set of very 

5efficient numerical/continuous optimization methods. Here we wrap a set of 

6them into our `moptipy` :class:`~moptipy.api.process.Process` API. All 

7algorithms provided in this module are imported and wrapped from SciPy 

8(https://scipy.org). 

9 

10By using the :func:`~moptipy.api.subprocesses.without_should_terminate` 

11tool, we can enforce the termination criteria set via the 

12:class:`~moptipy.api.execution.Execution` builder on external algorithms 

13while piping all their function evaluations through the 

14:meth:`~moptipy.api.process.Process.evaluate` routine of the optimization 

15:meth:`~moptipy.api.process.Process`. This way, we can make these external 

16algorithms usable within `moptipy` in a transparent manner. 

17""" 

from typing import Any, Callable, Final, cast  # pylint: disable=W0611

import numpy as np
from numpy import ndarray
from scipy.optimize import Bounds  # type: ignore

# isort: off
# noinspection PyProtectedMember
# pylint: disable=C0412
from scipy.optimize._differentialevolution import (  # type: ignore # noqa
    differential_evolution,  # type: ignore # pylint: disable=C0412 # noqa
)
# isort: on

from pycommons.types import type_error

# noinspection PyProtectedMember
from scipy.optimize._optimize import (  # type: ignore # noqa: PLC2701
    _minimize_bfgs,  # type: ignore # noqa: PLC2701
    _minimize_cg,  # type: ignore # noqa: PLC2701
    _minimize_neldermead,  # type: ignore # noqa: PLC2701
    _minimize_powell,  # type: ignore # noqa: PLC2701
)

# noinspection PyProtectedMember
from scipy.optimize._slsqp_py import _minimize_slsqp  # type: ignore # noqa

# noinspection PyProtectedMember
from scipy.optimize._tnc import _minimize_tnc  # type: ignore # noqa: PLC2701

from moptipy.api.algorithm import Algorithm, Algorithm0
from moptipy.api.operators import Op0
from moptipy.api.process import Process
from moptipy.api.subprocesses import (
    get_remaining_fes,
    without_should_terminate,
)
from moptipy.spaces.vectorspace import VectorSpace
from moptipy.utils.logger import KeyValueLogSection

class SciPyAlgorithmWrapper(Algorithm0):
    """
    A wrapper for the SciPy API.

    An instance of this class may be re-used, but it must only be used for
    problems of the same dimension.
    """

    def __init__(self, name: str, op0: Op0, space: VectorSpace) -> None:
        """
        Create the algorithm importer from scipy.

        :param name: the name of the algorithm
        :param op0: the nullary search operator
        :param space: the vector space
        """
        super().__init__(name, op0)
        if not isinstance(space, VectorSpace):
            raise type_error(space, "space", VectorSpace)
        #: the vector space defining the dimensions and bounds
        self.space: Final[VectorSpace] = space
        #: the bounds to be used for the internal function call
        self.__bounds: Final[Bounds] = Bounds(space.lower_bound,
                                              space.upper_bound)
        #: the cache for starting points
        self.__x0: Final[ndarray] = space.create()

    def _call(self, func: Callable[[np.ndarray], int | float],
              x0: np.ndarray, max_fes: int, bounds: Bounds) -> None:
        """
        Invoke the SciPy algorithm.

        This method is overwritten by the subclasses to call the actual
        SciPy algorithm.

        :param func: the function to minimize
        :param x0: the starting point
        :param max_fes: the maximum FEs
        :param bounds: the bounds
        """

    def __run(self, process: Process) -> None:
        """
        Execute the algorithm.

        :param process: the process
        """
        x0: Final[np.ndarray] = self.__x0
        self.op0.op0(process.get_random(), x0)  # create first solution
        # `clipped` wraps `process.evaluate` so that every point is clipped
        # into the bounds of the space before it is evaluated
        self._call(self.space.clipped(process.evaluate),
                   x0, get_remaining_fes(process),
                   self.__bounds)  # invoke the algorithm

    def solve(self, process: Process) -> None:
        """
        Apply the algorithm from SciPy to an optimization problem.

        Basically, this wraps a specific configuration of
        :func:`scipy.optimize.minimize` into our process API and
        invokes it.

        :param process: the black-box process object
        """
        # invoke the SciPy algorithm implementation; this also enforces the
        # termination criterion even if the wrapped algorithm ignores it
        without_should_terminate(
            cast("Callable[[Process], Any]", self.__run), process)

    def log_parameters_to(self, logger: KeyValueLogSection) -> None:
        """
        Log the parameters of the algorithm to a logger.

        :param logger: the logger for the parameters
        """
        super().log_parameters_to(logger)  # log algorithm/operator
        self.space.log_bounds(logger)  # log bounds
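# A hypothetical further subclass (illustration only) shows how the `_call`
# pattern generalizes: any other method of the public front-end
# :func:`scipy.optimize.minimize` can be wired up in the same way. The name
# "lbfgsb_scipy" and the chosen option values are assumptions:
#
#     from scipy.optimize import minimize
#
#     def _call_lbfgsb(func: Callable[[np.ndarray], int | float],
#                      x0: np.ndarray, max_fes: int,
#                      bounds: Bounds) -> None:
#         minimize(func, x0, method="L-BFGS-B", bounds=bounds,
#                  options={"maxfun": max_fes, "ftol": 0.0, "gtol": 0.0})
#
#     class LBFGSB(SciPyAlgorithmWrapper):
#         def __init__(self, op0: Op0, space: VectorSpace) -> None:
#             super().__init__("lbfgsb_scipy", op0, space)
#             self._call = _call_lbfgsb  # type: ignore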

# noinspection PyProtectedMember
def _call_powell(func: Callable[[np.ndarray], int | float],
                 x0: np.ndarray, max_fes: int, bounds: Bounds) -> None:
    _minimize_powell(func, x0, bounds=bounds, xtol=0.0, ftol=0.0,
                     maxiter=max_fes, maxfev=max_fes)


class Powell(SciPyAlgorithmWrapper):
    """
    Powell's Algorithm.

    The function :func:`scipy.optimize.minimize` with parameter
    "Powell" for continuous optimization.

    1. Michael James David Powell. An Efficient Method for Finding the
       Minimum of a Function of Several Variables without Calculating
       Derivatives. *The Computer Journal*. 7(2):155-162. 1964.
       https://doi.org/10.1093/comjnl/7.2.155
    """

    def __init__(self, op0: Op0, space: VectorSpace) -> None:
        """
        Create Powell's algorithm importer from scipy.

        :param op0: the nullary search operator
        :param space: the vector space
        """
        super().__init__("powell_scipy", op0, space)
        self._call = _call_powell  # type: ignore

def _call_nelder_mead(func: Callable[[np.ndarray], int | float],
                      x0: np.ndarray, max_fes: int, bounds: Bounds) -> None:
    _minimize_neldermead(func, x0, bounds=bounds, xatol=0.0, fatol=0.0,
                         maxiter=max_fes, maxfev=max_fes)


class NelderMead(SciPyAlgorithmWrapper):
    """
    The Downhill Simplex a.k.a. the Nelder-Mead Algorithm.

    The function :func:`scipy.optimize.minimize` with parameter
    "Nelder-Mead" performs continuous optimization using the Downhill
    Simplex algorithm, a.k.a. the Nelder-Mead algorithm. Here we wrap it
    into our API.

    SciPy provides the following references:

    1. Fuchang Gao and Lixing Han. Implementing the Nelder-Mead Simplex
       Algorithm with Adaptive Parameters. *Computational Optimization and
       Applications*. 51(1):259-277. January 2012.
       https://doi.org/10.1007/s10589-010-9329-3
    2. J. A. Nelder and R. Mead. A Simplex Method for Function Minimization.
       *The Computer Journal*. 7(4):308-313. January 1965. Oxford University
       Press (OUP). http://dx.doi.org/10.1093/COMJNL/7.4.308
       https://people.duke.edu/~hpgavin/cee201/Nelder+Mead-\
ComputerJournal-1965.pdf
    3. M. H. Wright. Direct Search Methods: Once Scorned, Now Respectable.
       In D.F. Griffiths and G.A. Watson (Eds.), *Proceedings of the 1995
       Dundee Biennial Conference in Numerical Analysis*. Harlow, UK:
       Addison Wesley Longman, pp. 191-208.
    4. Jorge Nocedal and Stephen J. Wright. *Numerical Optimization*. In
       Springer Series in Operations Research and Financial Engineering.
       New York, NY, USA: Springer. 2006. Second Edition.
       ISBN: 978-0-387-30303-1. Chapter 9.5, Page 238.
       https://doi.org/10.1007/978-0-387-40065-5
    """

    def __init__(self, op0: Op0, space: VectorSpace) -> None:
        """
        Create the Nelder-Mead Downhill Simplex importer from scipy.

        :param op0: the nullary search operator
        :param space: the vector space
        """
        super().__init__("nelderMead_scipy", op0, space)
        self._call = _call_nelder_mead  # type: ignore

def _call_bgfs(func: Callable[[np.ndarray], int | float],
               x0: np.ndarray, max_fes: int, _) -> None:
    _minimize_bfgs(func, x0, gtol=0.0, maxiter=max_fes)


class BGFS(SciPyAlgorithmWrapper):
    """
    The wrapper for the BFGS algorithm in SciPy.

    This is the quasi-Newton method by C. G. Broyden, Roger Fletcher,
    D. Goldfarb, and David F. Shanno (BFGS).

    1. Jorge Nocedal and Stephen J. Wright. *Numerical Optimization*. In
       Springer Series in Operations Research and Financial Engineering.
       New York, NY, USA: Springer. 2006. Second Edition.
       ISBN: 978-0-387-30303-1. Chapter 6, Page 136.
       https://doi.org/10.1007/978-0-387-40065-5
    2. Roger Fletcher. *Practical Methods of Optimization* (2nd ed.).
       New York: John Wiley & Sons. 1987. ISBN: 978-0-471-91547-8.
    3. C. G. Broyden. The Convergence of a Class of Double-Rank Minimization
       Algorithms. *Journal of the Institute of Mathematics and Its
       Applications*. 6(1):76-90. March 1970.
       http://dx.doi.org/10.1093/imamat/6.1.76
    """

    def __init__(self, op0: Op0, space: VectorSpace) -> None:
        """
        Create the BFGS algorithm importer from scipy.

        :param op0: the nullary search operator
        :param space: the vector space
        """
        super().__init__("bgfs_scipy", op0, space)
        self._call = _call_bgfs  # type: ignore

def _call_cg(func: Callable[[np.ndarray], int | float],
             x0: np.ndarray, max_fes: int, _) -> None:
    _minimize_cg(func, x0, gtol=0.0, maxiter=max_fes)


class CG(SciPyAlgorithmWrapper):
    """
    The wrapper for the Conjugate Gradient algorithm in SciPy.

    1. Jorge Nocedal and Stephen J. Wright. *Numerical Optimization*. In
       Springer Series in Operations Research and Financial Engineering.
       New York, NY, USA: Springer. 2006. Second Edition.
       ISBN: 978-0-387-30303-1. Chapter 5, Page 101.
    """

    def __init__(self, op0: Op0, space: VectorSpace) -> None:
        """
        Create the Conjugate Gradient algorithm importer from scipy.

        :param op0: the nullary search operator
        :param space: the vector space
        """
        super().__init__("cg_scipy", op0, space)
        self._call = _call_cg  # type: ignore

def _call_slsqp(func: Callable[[np.ndarray], int | float],
                x0: np.ndarray, max_fes: int, _) -> None:
    _minimize_slsqp(func, x0, ftol=0.0, maxiter=max_fes)


class SLSQP(SciPyAlgorithmWrapper):
    """
    The Sequential Least Squares Programming (SLSQP) algorithm in SciPy.

    1. Dieter Kraft. Algorithm 733: TOMP-Fortran modules for optimal control
       calculations. *ACM Transactions on Mathematical Software*.
       20(3):262-281. September 1994. https://doi.org/10.1145/192115.192124
    """

    def __init__(self, op0: Op0, space: VectorSpace) -> None:
        """
        Create the SLSQP algorithm importer from scipy.

        :param op0: the nullary search operator
        :param space: the vector space
        """
        super().__init__("slsqp_scipy", op0, space)
        self._call = _call_slsqp  # type: ignore

def _call_tnc(func: Callable[[np.ndarray], int | float],
              x0: np.ndarray, max_fes: int, bounds: Bounds) -> None:
    _minimize_tnc(
        func, x0,
        # _minimize_tnc expects bounds as a list of (lower, upper) tuples,
        # so we convert the `Bounds` object accordingly
        bounds=[(lb, bounds.ub[i]) for i, lb in enumerate(bounds.lb)],
        ftol=0.0, xtol=0.0, gtol=0.0, maxfun=max_fes)


class TNC(SciPyAlgorithmWrapper):
    """
    The Truncated Newton Method from SciPy.

    1. Stephen G. Nash. Newton-Type Minimization via the Lanczos Method.
       *SIAM Journal on Numerical Analysis*. 21(4):770-783. August 1984.
       https://dx.doi.org/10.1137/0721052
    2. Jorge Nocedal and Stephen J. Wright. *Numerical Optimization*. In
       Springer Series in Operations Research and Financial Engineering.
       New York, NY, USA: Springer. 2006. Second Edition.
       ISBN: 978-0-387-30303-1. https://doi.org/10.1007/978-0-387-40065-5
    """

    def __init__(self, op0: Op0, space: VectorSpace) -> None:
        """
        Create the TNC algorithm importer from scipy.

        :param op0: the nullary search operator
        :param space: the vector space
        """
        super().__init__("tnc_scipy", op0, space)
        self._call = _call_tnc  # type: ignore

class DE(Algorithm):
    """
    The Differential Evolution Algorithm as implemented by SciPy.

    At this point, we do not expose the many parameters of the function
    :func:`scipy.optimize.differential_evolution`.
    We only use the default settings. This may change in future releases.

    1. Rainer Storn and Kenneth Price. Differential Evolution - A Simple and
       Efficient Heuristic for Global Optimization over Continuous Spaces.
       *Journal of Global Optimization*. 11(4):341-359. December 1997.
       https://doi.org/10.1023/A:1008202821328
       https://www.researchgate.net/publication/227242104
    """

    def __init__(self, space: VectorSpace) -> None:
        """
        Create the Differential Evolution Algorithm from SciPy.

        :param space: the vector space
        """
        super().__init__()
        if not isinstance(space, VectorSpace):
            raise type_error(space, "space", VectorSpace)
        #: the vector space defining the dimensions and bounds
        self.space: Final[VectorSpace] = space
        #: the bounds of the search space, derived from :attr:`space`
        self.__bounds: Final[list[tuple[float, float]]] = \
            [(lb, space.upper_bound[i])
             for i, lb in enumerate(space.lower_bound)]

    def __run(self, process: Process) -> None:
        """
        Execute the algorithm.

        :param process: the process
        """
        mf = get_remaining_fes(process)  # get the number of available FEs

        differential_evolution(
            self.space.clipped(process.evaluate),
            bounds=self.__bounds,
            # `maxiter` is only a coarse cap; the actual FE budget is
            # enforced through the Process API via `without_should_terminate`
            maxiter=int(mf / len(self.__bounds)) + 1,
            tol=0.0, seed=process.get_random(), atol=0.0)

    def solve(self, process: Process) -> None:
        """
        Apply the algorithm from SciPy to an optimization problem.

        Basically, this wraps a specific configuration of
        :func:`scipy.optimize.differential_evolution` into our process API
        and invokes it.

        :param process: the black-box process object
        """
        # invoke the SciPy algorithm implementation
        without_should_terminate(
            cast("Callable[[Process], Any]", self.__run), process)

    def log_parameters_to(self, logger: KeyValueLogSection) -> None:
        """
        Log the parameters of the algorithm to a logger.

        :param logger: the logger for the parameters
        """
        super().log_parameters_to(logger)  # log the algorithm name
        with logger.scope("space") as sp:
            self.space.log_parameters_to(sp)  # log the space

    def __str__(self):
        """
        Get the name of this algorithm.

        :returns: the name of this differential evolution algorithm
        """
        return "de_scipy"
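
# Note the constructor difference (illustration only): the minimize-based
# wrappers above need a nullary operator to sample their starting point,
# whereas `DE` creates its own initial population and thus only takes the
# space (`Op0Uniform` is again an assumed operator choice, as in the sketch
# at the top of this module):
#
#     algorithm = DE(space)                         # differential evolution
#     algorithm = Powell(Op0Uniform(space), space)  # minimize-based wrapper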