1""" 

2basinhopping: The basinhopping global optimization algorithm 

3""" 

4import numpy as np 

5import math 

6from numpy import cos, sin 

7import scipy.optimize 

8from scipy._lib._util import check_random_state 

9 

10__all__ = ['basinhopping'] 

11 

12 

class Storage(object):
    """
    Class used to store the lowest energy structure
    """
    def __init__(self, minres):
        self._add(minres)

    def _add(self, minres):
        self.minres = minres
        self.minres.x = np.copy(minres.x)

    def update(self, minres):
        if minres.fun < self.minres.fun:
            self._add(minres)
            return True
        else:
            return False

    def get_lowest(self):
        return self.minres

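# Illustrative sketch (comments only; ``minres_*`` are hypothetical
# ``OptimizeResult`` objects): ``Storage`` keeps whichever result has the
# lowest ``fun`` seen so far.
#
#     store = Storage(minres_a)       # minres_a.fun == 1.0
#     store.update(minres_b)          # minres_b.fun == 0.5 -> True, replaces
#     store.update(minres_c)          # minres_c.fun == 2.0 -> False, ignored
#     store.get_lowest() is minres_b  # True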

class BasinHoppingRunner(object):
    """This class implements the core of the basinhopping algorithm.

    x0 : ndarray
        The starting coordinates.
    minimizer : callable
        The local minimizer, with signature ``result = minimizer(x)``.
        The return value is an `optimize.OptimizeResult` object.
    step_taking : callable
        This function displaces the coordinates randomly. Signature should
        be ``x_new = step_taking(x)``. Note that `x` may be modified in-place.
    accept_tests : list of callables
        Each test is passed the kwargs `f_new`, `x_new`, `f_old` and
        `x_old`. These tests will be used to judge whether or not to accept
        the step. The acceptable return values are True, False, or ``"force
        accept"``. If any of the tests return False then the step is rejected.
        If ``"force accept"``, then this will override any other tests in
        order to accept the step. This can be used, for example, to forcefully
        escape from a local minimum that ``basinhopping`` is trapped in.
    disp : bool, optional
        Display status messages.

    """
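    # A minimal sketch of a conforming accept test (illustrative only; the
    # threshold 5.0 and the target value -100.0 are hypothetical):
    #
    #     def my_accept_test(f_new=None, x_new=None, f_old=None, x_old=None):
    #         if f_new < -100.0:
    #             return "force accept"   # overrides all other tests
    #         return f_new - f_old < 5.0  # plain True/False otherwise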

    def __init__(self, x0, minimizer, step_taking, accept_tests, disp=False):
        self.x = np.copy(x0)
        self.minimizer = minimizer
        self.step_taking = step_taking
        self.accept_tests = accept_tests
        self.disp = disp

        self.nstep = 0

        # initialize return object
        self.res = scipy.optimize.OptimizeResult()
        self.res.minimization_failures = 0

        # do initial minimization
        minres = minimizer(self.x)
        if not minres.success:
            self.res.minimization_failures += 1
            if self.disp:
                print("warning: basinhopping: local minimization failure")
        self.x = np.copy(minres.x)
        self.energy = minres.fun
        if self.disp:
            print("basinhopping step %d: f %g" % (self.nstep, self.energy))

        # initialize storage class
        self.storage = Storage(minres)

        if hasattr(minres, "nfev"):
            self.res.nfev = minres.nfev
        if hasattr(minres, "njev"):
            self.res.njev = minres.njev
        if hasattr(minres, "nhev"):
            self.res.nhev = minres.nhev

    def _monte_carlo_step(self):
        """Do one Monte Carlo iteration

        Randomly displace the coordinates, minimize, and decide whether
        or not to accept the new coordinates.
        """
        # Take a random step. Make a copy of x because the step_taking
        # algorithm might change x in place
        x_after_step = np.copy(self.x)
        x_after_step = self.step_taking(x_after_step)

        # do a local minimization
        minres = self.minimizer(x_after_step)
        x_after_quench = minres.x
        energy_after_quench = minres.fun
        if not minres.success:
            self.res.minimization_failures += 1
            if self.disp:
                print("warning: basinhopping: local minimization failure")

        if hasattr(minres, "nfev"):
            self.res.nfev += minres.nfev
        if hasattr(minres, "njev"):
            self.res.njev += minres.njev
        if hasattr(minres, "nhev"):
            self.res.nhev += minres.nhev

        # accept the move based on self.accept_tests. If any test is False,
        # then reject the step. If any test returns the special string
        # 'force accept', then accept the step regardless. This can be used
        # to forcefully escape from a local minimum if normal basin hopping
        # steps are not sufficient.
        accept = True
        for test in self.accept_tests:
            testres = test(f_new=energy_after_quench, x_new=x_after_quench,
                           f_old=self.energy, x_old=self.x)
            if testres == 'force accept':
                accept = True
                break
            elif testres is None:
                raise ValueError("accept_tests must return True, False, or "
                                 "'force accept'")
            elif not testres:
                accept = False

        # Report the result of the acceptance test to the take step class.
        # This is for adaptive step taking
        if hasattr(self.step_taking, "report"):
            self.step_taking.report(accept, f_new=energy_after_quench,
                                    x_new=x_after_quench, f_old=self.energy,
                                    x_old=self.x)

        return accept, minres

    def one_cycle(self):
        """Do one cycle of the basinhopping algorithm
        """
        self.nstep += 1
        new_global_min = False

        accept, minres = self._monte_carlo_step()

        if accept:
            self.energy = minres.fun
            self.x = np.copy(minres.x)
            new_global_min = self.storage.update(minres)

        # print some information
        if self.disp:
            self.print_report(minres.fun, accept)
            if new_global_min:
                print("found new global minimum on step %d with function"
                      " value %g" % (self.nstep, self.energy))

        # save some variables as BasinHoppingRunner attributes
        self.xtrial = minres.x
        self.energy_trial = minres.fun
        self.accept = accept

        return new_global_min

    def print_report(self, energy_trial, accept):
        """print a status update"""
        minres = self.storage.get_lowest()
        print("basinhopping step %d: f %g trial_f %g accepted %d "
              " lowest_f %g" % (self.nstep, self.energy, energy_trial,
                                accept, minres.fun))


class AdaptiveStepsize(object):
    """
    Class to implement adaptive stepsize.

    This class wraps the step taking class and modifies the stepsize to
    ensure the true acceptance rate is as close as possible to the target.

    Parameters
    ----------
    takestep : callable
        The step taking routine. Must contain modifiable attribute
        takestep.stepsize
    accept_rate : float, optional
        The target step acceptance rate
    interval : int, optional
        Interval for how often to update the stepsize
    factor : float, optional
        The step size is multiplied or divided by this factor upon each
        update.
    verbose : bool, optional
        Print information about each update

    """

    def __init__(self, takestep, accept_rate=0.5, interval=50, factor=0.9,
                 verbose=True):
        self.takestep = takestep
        self.target_accept_rate = accept_rate
        self.interval = interval
        self.factor = factor
        self.verbose = verbose

        self.nstep = 0
        self.nstep_tot = 0
        self.naccept = 0

    def __call__(self, x):
        return self.take_step(x)

    def _adjust_step_size(self):
        old_stepsize = self.takestep.stepsize
        accept_rate = float(self.naccept) / self.nstep
        if accept_rate > self.target_accept_rate:
            # We're accepting too many steps. This generally means we're
            # trapped in a basin. Take bigger steps.
            self.takestep.stepsize /= self.factor
        else:
            # We're not accepting enough steps. Take smaller steps.
            self.takestep.stepsize *= self.factor
        if self.verbose:
            print("adaptive stepsize: acceptance rate %f target %f new "
                  "stepsize %g old stepsize %g" % (accept_rate,
                  self.target_accept_rate, self.takestep.stepsize,
                  old_stepsize))

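    # Worked example of the update rule above (values assumed): with the
    # default factor=0.9 and target_accept_rate=0.5, an observed acceptance
    # rate of 0.8 grows the stepsize by a factor of 1/0.9 (about 11%), while
    # a rate of 0.2 shrinks it by a factor of 0.9. Repeated every `interval`
    # steps, this drives the acceptance rate toward the target.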

    def take_step(self, x):
        self.nstep += 1
        self.nstep_tot += 1
        if self.nstep % self.interval == 0:
            self._adjust_step_size()
        return self.takestep(x)

    def report(self, accept, **kwargs):
        "called by basinhopping to report the result of the step"
        if accept:
            self.naccept += 1


class RandomDisplacement(object):
    """
    Add a random displacement of maximum size `stepsize` to each coordinate

    Calling this updates `x` in-place.

    Parameters
    ----------
    stepsize : float, optional
        Maximum stepsize in any dimension
    random_gen : {None, `np.random.RandomState`, `np.random.Generator`}
        The random number generator that generates the displacements
    """
    def __init__(self, stepsize=0.5, random_gen=None):
        self.stepsize = stepsize
        self.random_gen = check_random_state(random_gen)

    def __call__(self, x):
        x += self.random_gen.uniform(-self.stepsize, self.stepsize,
                                     np.shape(x))
        return x

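# Illustrative usage sketch (comments only): each coordinate is perturbed
# by an independent uniform draw from [-stepsize, stepsize].
#
#     displace = RandomDisplacement(stepsize=0.5,
#                                   random_gen=np.random.RandomState(42))
#     x = displace(np.zeros(3))  # each entry now lies in [-0.5, 0.5]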

class MinimizerWrapper(object):
    """
    wrap a minimizer function as a minimizer class
    """
    def __init__(self, minimizer, func=None, **kwargs):
        self.minimizer = minimizer
        self.func = func
        self.kwargs = kwargs

    def __call__(self, x0):
        if self.func is None:
            return self.minimizer(x0, **self.kwargs)
        else:
            return self.minimizer(self.func, x0, **self.kwargs)

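# Illustrative sketch (comments only) of the wrapping done in
# ``basinhopping`` below; ``func`` and ``x0`` are assumed to be defined:
#
#     wrapped = MinimizerWrapper(scipy.optimize.minimize, func,
#                                method="L-BFGS-B")
#     minres = wrapped(x0)  # same as scipy.optimize.minimize(func, x0,
#                           #                                 method="L-BFGS-B")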

class Metropolis(object):
    """
    Metropolis acceptance criterion

    Parameters
    ----------
    T : float
        The "temperature" parameter for the accept or reject criterion.
    random_gen : {None, `np.random.RandomState`, `np.random.Generator`}
        Random number generator used for acceptance test
    """
    def __init__(self, T, random_gen=None):
        # Avoid ZeroDivisionError since "MBH can be regarded as a special case
        # of the BH framework with the Metropolis criterion, where temperature
        # T = 0." (Reject all steps that increase energy.)
        self.beta = 1.0 / T if T != 0 else float('inf')
        self.random_gen = check_random_state(random_gen)

    def accept_reject(self, energy_new, energy_old):
        """
        If new energy is lower than old, it will always be accepted.
        If new is higher than old, there is a chance it will be accepted,
        less likely for larger differences.
        """
        with np.errstate(invalid='ignore'):
            # The energy values being fed to Metropolis are 1-length arrays,
            # and if they are equal, their difference is 0, which gets
            # multiplied by beta, which is inf, and array([0]) * float('inf')
            # causes
            #
            #     RuntimeWarning: invalid value encountered in multiply
            #
            # Ignore this warning so that when the algorithm is on a flat
            # plane, it always accepts the step, to try to move off the plane.
            prod = -(energy_new - energy_old) * self.beta
            w = math.exp(min(0, prod))

        rand = self.random_gen.uniform()
        return w >= rand

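    # Worked example of the criterion above (values assumed): with T = 1.0
    # (beta = 1.0), a step that raises the energy by 2.0 gives
    # w = exp(-2.0) ~= 0.135, so it is accepted about 13.5% of the time;
    # any step that lowers the energy gives w = 1 and is always accepted.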

    def __call__(self, **kwargs):
        """
        f_new and f_old are mandatory in kwargs
        """
        return bool(self.accept_reject(kwargs["f_new"],
                    kwargs["f_old"]))


def basinhopping(func, x0, niter=100, T=1.0, stepsize=0.5,
                 minimizer_kwargs=None, take_step=None, accept_test=None,
                 callback=None, interval=50, disp=False, niter_success=None,
                 seed=None):
    """
    Find the global minimum of a function using the basin-hopping algorithm

    Basin-hopping is a two-phase method that combines a global stepping
    algorithm with local minimization at each step. Designed to mimic
    the natural process of energy minimization of clusters of atoms, it works
    well for similar problems with "funnel-like, but rugged" energy landscapes
    [5]_.

    As the step-taking, step acceptance, and minimization methods are all
    customizable, this function can also be used to implement other two-phase
    methods.

    Parameters
    ----------
    func : callable ``f(x, *args)``
        Function to be optimized. ``args`` can be passed as an optional item
        in the dict ``minimizer_kwargs``
    x0 : array_like
        Initial guess.
    niter : integer, optional
        The number of basin-hopping iterations
    T : float, optional
        The "temperature" parameter for the accept or reject criterion. Higher
        "temperatures" mean that larger jumps in function value will be
        accepted. For best results ``T`` should be comparable to the
        separation (in function value) between local minima.
    stepsize : float, optional
        Maximum step size for use in the random displacement.
    minimizer_kwargs : dict, optional
        Extra keyword arguments to be passed to the local minimizer
        ``scipy.optimize.minimize()``. Some important options could be:

            method : str
                The minimization method (e.g. ``"L-BFGS-B"``)
            args : tuple
                Extra arguments passed to the objective function (``func``)
                and its derivatives (Jacobian, Hessian).

    take_step : callable ``take_step(x)``, optional
        Replace the default step-taking routine with this routine. The default
        step-taking routine is a random displacement of the coordinates, but
        other step-taking algorithms may be better for some systems.
        ``take_step`` can optionally have the attribute ``take_step.stepsize``.
        If this attribute exists, then ``basinhopping`` will adjust
        ``take_step.stepsize`` in order to try to optimize the global minimum
        search.

    accept_test : callable, ``accept_test(f_new=f_new, x_new=x_new, f_old=fold, x_old=x_old)``, optional
        Define a test which will be used to judge whether or not to accept the
        step. This will be used in addition to the Metropolis test based on
        "temperature" ``T``. The acceptable return values are True,
        False, or ``"force accept"``. If any of the tests return False
        then the step is rejected. If ``"force accept"`` is returned, it
        will override any other tests in order to accept the step. This can
        be used, for example, to forcefully escape from a local minimum that
        ``basinhopping`` is trapped in.

    callback : callable, ``callback(x, f, accept)``, optional
        A callback function which will be called for all minima found. ``x``
        and ``f`` are the coordinates and function value of the trial minimum,
        and ``accept`` is whether or not that minimum was accepted. This can
        be used, for example, to save the lowest N minima found. Also,
        ``callback`` can be used to specify a user defined stop criterion by
        optionally returning True to stop the ``basinhopping`` routine.
    interval : integer, optional
        interval for how often to update the ``stepsize``
    disp : bool, optional
        Set to True to print status messages
    niter_success : integer, optional
        Stop the run if the global minimum candidate remains the same for this
        number of iterations.
    seed : {int, `~np.random.RandomState`, `~np.random.Generator`}, optional
        If `seed` is not specified the `~np.random.RandomState` singleton is
        used.
        If `seed` is an int, a new ``RandomState`` instance is used, seeded
        with seed.
        If `seed` is already a ``RandomState`` or ``Generator`` instance, then
        that object is used.
        Specify `seed` for repeatable minimizations. The random numbers
        generated with this seed only affect the default Metropolis
        `accept_test` and the default `take_step`. If you supply your own
        `take_step` and `accept_test`, and these functions use random
        number generation, then those functions are responsible for the state
        of their random number generator.

    Returns
    -------
    res : OptimizeResult
        The optimization result represented as an ``OptimizeResult`` object.
        Important attributes are: ``x`` the solution array, ``fun`` the value
        of the function at the solution, and ``message`` which describes the
        cause of the termination. The ``OptimizeResult`` object returned by
        the selected minimizer at the lowest minimum is also contained within
        this object and can be accessed through the
        ``lowest_optimization_result`` attribute. See `OptimizeResult` for a
        description of other attributes.

    See Also
    --------
    minimize :
        The local minimization function called once for each basinhopping
        step. ``minimizer_kwargs`` is passed to this routine.

    Notes
    -----
    Basin-hopping is a stochastic algorithm which attempts to find the global
    minimum of a smooth scalar function of one or more variables [1]_ [2]_
    [3]_ [4]_. The algorithm in its current form was described by David Wales
    and Jonathan Doye [2]_ http://www-wales.ch.cam.ac.uk/.

    The algorithm is iterative with each cycle composed of the following
    features

    1) random perturbation of the coordinates

    2) local minimization

    3) accept or reject the new coordinates based on the minimized function
       value

    The acceptance test used here is the Metropolis criterion of standard
    Monte Carlo algorithms, although there are many other possibilities [3]_.

    This global minimization method has been shown to be extremely efficient
    for a wide variety of problems in physics and chemistry. It is
    particularly useful when the function has many minima separated by large
    barriers. See the Cambridge Cluster Database
    http://www-wales.ch.cam.ac.uk/CCD.html for databases of molecular systems
    that have been optimized primarily using basin-hopping. This database
    includes minimization problems exceeding 300 degrees of freedom.

    See the free software program GMIN (http://www-wales.ch.cam.ac.uk/GMIN)
    for a Fortran implementation of basin-hopping. This implementation has
    many different variations of the procedure described above, including
    more advanced step taking algorithms and alternate acceptance criteria.

    For stochastic global optimization there is no way to determine if the
    true global minimum has actually been found. Instead, as a consistency
    check, the algorithm can be run from a number of different random starting
    points to ensure the lowest minimum found in each example has converged to
    the global minimum. For this reason, ``basinhopping`` will by default
    simply run for the number of iterations ``niter`` and return the lowest
    minimum found. It is left to the user to ensure that this is in fact the
    global minimum.

    Choosing ``stepsize``: This is a crucial parameter in ``basinhopping`` and
    depends on the problem being solved. The step is chosen uniformly in the
    region from x0-stepsize to x0+stepsize, in each dimension. Ideally, it
    should be comparable to the typical separation (in argument values)
    between local minima of the function being optimized. ``basinhopping``
    will, by default, adjust ``stepsize`` to find an optimal value, but this
    may take many iterations. You will get quicker results if you set a
    sensible initial value for ``stepsize``.

    Choosing ``T``: The parameter ``T`` is the "temperature" used in the
    Metropolis criterion. Basinhopping steps are always accepted if
    ``func(xnew) < func(xold)``. Otherwise, they are accepted with
    probability::

        exp( -(func(xnew) - func(xold)) / T )


    So, for best results, ``T`` should be comparable to the typical
    difference (in function values) between local minima. (The height of
    "walls" between local minima is irrelevant.)

    If ``T`` is 0, the algorithm becomes Monotonic Basin-Hopping, in which all
    steps that increase energy are rejected.

    .. versionadded:: 0.12.0

    References
    ----------
    .. [1] Wales, David J. 2003, Energy Landscapes, Cambridge University
        Press, Cambridge, UK.
    .. [2] Wales, D J, and Doye J P K, Global Optimization by Basin-Hopping
        and the Lowest Energy Structures of Lennard-Jones Clusters Containing
        up to 110 Atoms. Journal of Physical Chemistry A, 1997, 101, 5111.
    .. [3] Li, Z. and Scheraga, H. A., Monte Carlo-minimization approach to
        the multiple-minima problem in protein folding, Proc. Natl. Acad.
        Sci. USA, 1987, 84, 6611.
    .. [4] Wales, D. J. and Scheraga, H. A., Global optimization of clusters,
        crystals, and biomolecules, Science, 1999, 285, 1368.
    .. [5] Olson, B., Hashmi, I., Molloy, K., and Shehu, A., Basin Hopping as
        a General and Versatile Optimization Framework for the
        Characterization of Biological Macromolecules, Advances in Artificial
        Intelligence, Volume 2012 (2012), Article ID 674832,
        :doi:`10.1155/2012/674832`

    Examples
    --------
    The following example is a 1-D minimization problem, with many
    local minima superimposed on a parabola.

    >>> import numpy as np
    >>> from scipy.optimize import basinhopping
    >>> func = lambda x: np.cos(14.5 * x - 0.3) + (x + 0.2) * x
    >>> x0 = [1.]

    Basinhopping, internally, uses a local minimization algorithm. We will use
    the parameter ``minimizer_kwargs`` to tell basinhopping which algorithm to
    use and how to set up that minimizer. This parameter will be passed to
    ``scipy.optimize.minimize()``.

    >>> minimizer_kwargs = {"method": "BFGS"}
    >>> ret = basinhopping(func, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=200)
    >>> print("global minimum: x = %.4f, f(x0) = %.4f" % (ret.x, ret.fun))
    global minimum: x = -0.1951, f(x0) = -1.0009

    Next consider a 2-D minimization problem. Also, this time, we
    will use gradient information to significantly speed up the search.

    >>> def func2d(x):
    ...     f = np.cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] +
    ...                                                            0.2) * x[0]
    ...     df = np.zeros(2)
    ...     df[0] = -14.5 * np.sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2
    ...     df[1] = 2. * x[1] + 0.2
    ...     return f, df

    We'll also use a different local minimization algorithm. Also, we must
    tell the minimizer that our function returns both energy and gradient
    (Jacobian).

    >>> minimizer_kwargs = {"method":"L-BFGS-B", "jac":True}
    >>> x0 = [1.0, 1.0]
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=200)
    >>> print("global minimum: x = [%.4f, %.4f], f(x0) = %.4f" % (ret.x[0],
    ...                                                           ret.x[1],
    ...                                                           ret.fun))
    global minimum: x = [-0.1951, -0.1000], f(x0) = -1.0109

    Here is an example using a custom step-taking routine. Imagine you want
    the first coordinate to take larger steps than the rest of the
    coordinates. This can be implemented like so:

    >>> class MyTakeStep(object):
    ...     def __init__(self, stepsize=0.5):
    ...         self.stepsize = stepsize
    ...     def __call__(self, x):
    ...         s = self.stepsize
    ...         x[0] += np.random.uniform(-2.*s, 2.*s)
    ...         x[1:] += np.random.uniform(-s, s, x[1:].shape)
    ...         return x

    Since ``MyTakeStep.stepsize`` exists basinhopping will adjust the
    magnitude of ``stepsize`` to optimize the search. We'll use the same 2-D
    function as before

    >>> mytakestep = MyTakeStep()
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=200, take_step=mytakestep)
    >>> print("global minimum: x = [%.4f, %.4f], f(x0) = %.4f" % (ret.x[0],
    ...                                                           ret.x[1],
    ...                                                           ret.fun))
    global minimum: x = [-0.1951, -0.1000], f(x0) = -1.0109


    Now, let's do an example using a custom callback function which prints the
    value of every minimum found

    >>> def print_fun(x, f, accepted):
    ...     print("at minimum %.4f accepted %d" % (f, int(accepted)))

    We'll run it for only 10 basinhopping steps this time.

    >>> np.random.seed(1)
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=10, callback=print_fun)
    at minimum 0.4159 accepted 1
    at minimum -0.9073 accepted 1
    at minimum -0.1021 accepted 1
    at minimum -0.1021 accepted 1
    at minimum 0.9102 accepted 1
    at minimum 0.9102 accepted 1
    at minimum 2.2945 accepted 0
    at minimum -0.1021 accepted 1
    at minimum -1.0109 accepted 1
    at minimum -1.0109 accepted 1

    The minimum at -1.0109 is actually the global minimum, found already on
    the 8th iteration.

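    For repeatable runs, the ``seed`` parameter can be used instead of
    ``np.random.seed``. This is only a sketch: the exact sequence of minima
    visited may differ across NumPy and SciPy versions, so the output is not
    checked here.

    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=10, seed=1)  # doctest: +SKIP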

    Now let's implement bounds on the problem using a custom ``accept_test``:

    >>> class MyBounds(object):
    ...     def __init__(self, xmax=[1.1,1.1], xmin=[-1.1,-1.1] ):
    ...         self.xmax = np.array(xmax)
    ...         self.xmin = np.array(xmin)
    ...     def __call__(self, **kwargs):
    ...         x = kwargs["x_new"]
    ...         tmax = bool(np.all(x <= self.xmax))
    ...         tmin = bool(np.all(x >= self.xmin))
    ...         return tmax and tmin

    >>> mybounds = MyBounds()
    >>> ret = basinhopping(func2d, x0, minimizer_kwargs=minimizer_kwargs,
    ...                    niter=10, accept_test=mybounds)

    """

    x0 = np.array(x0)

    # set up the np.random.RandomState generator
    rng = check_random_state(seed)

    # set up minimizer
    if minimizer_kwargs is None:
        minimizer_kwargs = dict()
    wrapped_minimizer = MinimizerWrapper(scipy.optimize.minimize, func,
                                         **minimizer_kwargs)

    # set up step-taking algorithm
    if take_step is not None:
        if not callable(take_step):
            raise TypeError("take_step must be callable")
        # if take_step.stepsize exists then use AdaptiveStepsize to control
        # take_step.stepsize
        if hasattr(take_step, "stepsize"):
            take_step_wrapped = AdaptiveStepsize(take_step, interval=interval,
                                                 verbose=disp)
        else:
            take_step_wrapped = take_step
    else:
        # use default
        displace = RandomDisplacement(stepsize=stepsize, random_gen=rng)
        take_step_wrapped = AdaptiveStepsize(displace, interval=interval,
                                             verbose=disp)

    # set up accept tests
    accept_tests = []
    if accept_test is not None:
        if not callable(accept_test):
            raise TypeError("accept_test must be callable")
        accept_tests = [accept_test]

    # use default
    metropolis = Metropolis(T, random_gen=rng)
    accept_tests.append(metropolis)

    if niter_success is None:
        niter_success = niter + 2

    bh = BasinHoppingRunner(x0, wrapped_minimizer, take_step_wrapped,
                            accept_tests, disp=disp)

    # start main iteration loop
    count, i = 0, 0

    message = ["requested number of basinhopping iterations completed"
               " successfully"]
    for i in range(niter):
        new_global_min = bh.one_cycle()

        if callable(callback):
            # should we pass a copy of x?
            val = callback(bh.xtrial, bh.energy_trial, bh.accept)
            if val is not None:
                if val:
                    message = ["callback function requested stop early by"
                               " returning True"]
                    break

        count += 1
        if new_global_min:
            count = 0
        elif count > niter_success:
            message = ["success condition satisfied"]
            break

    # prepare return object
    res = bh.res
    res.lowest_optimization_result = bh.storage.get_lowest()
    res.x = np.copy(res.lowest_optimization_result.x)
    res.fun = res.lowest_optimization_result.fun
    res.message = message
    res.nit = i + 1
    return res


def _test_func2d_nograd(x):
    f = (cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] + 0.2) * x[0]
         + 1.010876184442655)
    return f


def _test_func2d(x):
    f = (cos(14.5 * x[0] - 0.3) + (x[0] + 0.2) * x[0] +
         cos(14.5 * x[1] - 0.3) + (x[1] + 0.2) * x[1] +
         x[0] * x[1] + 1.963879482144252)
    df = np.zeros(2)
    df[0] = -14.5 * sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2 + x[1]
    df[1] = -14.5 * sin(14.5 * x[1] - 0.3) + 2. * x[1] + 0.2 + x[0]
    return f, df


729if __name__ == "__main__": 

730 print("\n\nminimize a 2-D function without gradient") 

731 # minimum expected at ~[-0.195, -0.1] 

732 kwargs = {"method": "L-BFGS-B"} 

733 x0 = np.array([1.0, 1.]) 

734 scipy.optimize.minimize(_test_func2d_nograd, x0, **kwargs) 

735 ret = basinhopping(_test_func2d_nograd, x0, minimizer_kwargs=kwargs, 

736 niter=200, disp=False) 

737 print("minimum expected at func([-0.195, -0.1]) = 0.0") 

738 print(ret) 

739 

740 print("\n\ntry a harder 2-D problem") 

741 kwargs = {"method": "L-BFGS-B", "jac": True} 

742 x0 = np.array([1.0, 1.0]) 

743 ret = basinhopping(_test_func2d, x0, minimizer_kwargs=kwargs, niter=200, 

744 disp=False) 

745 print("minimum expected at ~, func([-0.19415263, -0.19415263]) = 0") 

746 print(ret)