Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1""" 

2Interface to Constrained Optimization By Linear Approximation 

3 

4Functions 

5--------- 

6.. autosummary:: 

7 :toctree: generated/ 

8 

9 fmin_cobyla 

10 

11""" 

12 

13import functools 

14from threading import RLock 

15 

16import numpy as np 

17from scipy.optimize import _cobyla 

18from .optimize import OptimizeResult, _check_unknown_options 

19try: 

20 from itertools import izip 

21except ImportError: 

22 izip = zip 

23 

24__all__ = ['fmin_cobyla'] 

25 

# Workaround: _cobyla.minimize is not thread-safe — an unknown f2py bug
# can make concurrent calls segfault (see gh-9658) — so every public
# entry point is serialized through this module-level lock.
_module_lock = RLock()


def synchronized(func):
    """Decorator that serializes calls to *func* under the module lock."""
    @functools.wraps(func)
    def locked(*args, **kwargs):
        with _module_lock:
            return func(*args, **kwargs)
    return locked

36 

@synchronized
def fmin_cobyla(func, x0, cons, args=(), consargs=None, rhobeg=1.0,
                rhoend=1e-4, maxfun=1000, disp=None, catol=2e-4):
    """
    Minimize a function using the Constrained Optimization By Linear
    Approximation (COBYLA) method. This method wraps a FORTRAN
    implementation of the algorithm.

    Parameters
    ----------
    func : callable
        Function to minimize. In the form func(x, \\*args).
    x0 : ndarray
        Initial guess.
    cons : sequence
        Constraint functions; must all be ``>=0`` (a single function
        if only 1 constraint). Each function takes the parameters `x`
        as its first argument, and it can return either a single number or
        an array or list of numbers.
    args : tuple, optional
        Extra arguments to pass to function.
    consargs : tuple, optional
        Extra arguments to pass to constraint functions (default of None means
        use same extra arguments as those passed to func).
        Use ``()`` for no extra arguments.
    rhobeg : float, optional
        Reasonable initial changes to the variables.
    rhoend : float, optional
        Final accuracy in the optimization (not precisely guaranteed). This
        is a lower bound on the size of the trust region.
    disp : {0, 1, 2, 3}, optional
        Controls the frequency of output; 0 implies no output.
    maxfun : int, optional
        Maximum number of function evaluations.
    catol : float, optional
        Absolute tolerance for constraint violations.

    Returns
    -------
    x : ndarray
        The argument that minimises `f`.

    See also
    --------
    minimize: Interface to minimization algorithms for multivariate
        functions. See the 'COBYLA' `method` in particular.

    Notes
    -----
    This algorithm is based on linear approximations to the objective
    function and each constraint. We briefly describe the algorithm.

    Suppose the function is being minimized over k variables. At the
    jth iteration the algorithm has k+1 points v_1, ..., v_(k+1),
    an approximate solution x_j, and a radius RHO_j. It then constructs
    affine (i.e., linear plus a constant) approximations to the objective
    function and constraint functions such that their function values
    agree with the linear approximation on the k+1 points v_1, ..., v_(k+1).
    This gives a linear program to solve (where the linear approximations
    of the constraint functions are constrained to be non-negative).

    However, the linear approximations are likely only good
    approximations near the current simplex, so the linear program is
    given the further requirement that the solution, which
    will become x_(j+1), must be within RHO_j from x_j. RHO_j only
    decreases, never increases. The initial RHO_j is rhobeg and the
    final RHO_j is rhoend. In this way COBYLA's iterations behave
    like a trust region algorithm.

    Additionally, the linear program may be inconsistent, or the
    approximation may give poor improvement. For details about
    how these issues are resolved, as well as how the points v_i are
    updated, refer to the source code or the references below.

    References
    ----------
    Powell M.J.D. (1994), "A direct search optimization method that models
    the objective and constraint functions by linear interpolation.", in
    Advances in Optimization and Numerical Analysis, eds. S. Gomez and
    J-P Hennart, Kluwer Academic (Dordrecht), pp. 51-67

    Powell M.J.D. (1998), "Direct search algorithms for optimization
    calculations", Acta Numerica 7, 287-336

    Powell M.J.D. (2007), "A view of algorithms for optimization without
    derivatives", Cambridge University Technical Report DAMTP 2007/NA03

    Examples
    --------
    Minimize the objective function f(x,y) = x*y subject
    to the constraints x**2 + y**2 < 1 and y > 0::

        >>> def objective(x):
        ...     return x[0]*x[1]
        ...
        >>> def constr1(x):
        ...     return 1 - (x[0]**2 + x[1]**2)
        ...
        >>> def constr2(x):
        ...     return x[1]
        ...
        >>> from scipy.optimize import fmin_cobyla
        >>> fmin_cobyla(objective, [0.0, 0.1], [constr1, constr2], rhoend=1e-7)
        array([-0.70710685,  0.70710671])

    The exact solution is (-sqrt(2)/2, sqrt(2)/2).

    """
    err = ("cons must be a sequence of callable functions or a single"
           " callable function.")
    try:
        len(cons)
    except TypeError:
        # A single constraint function was passed; wrap it in a list.
        if not callable(cons):
            raise TypeError(err)
        cons = [cons]
    else:
        # A sequence was passed; every element must be callable.
        if not all(callable(c) for c in cons):
            raise TypeError(err)

    # By default the constraint functions receive the same extra
    # arguments as the objective.
    if consargs is None:
        consargs = args

    # Translate the plain callables into the dict-based constraint
    # specification understood by _minimize_cobyla.
    constraints = tuple({'type': 'ineq', 'fun': c, 'args': consargs}
                        for c in cons)

    sol = _minimize_cobyla(func, x0, args, constraints=constraints,
                           rhobeg=rhobeg, tol=rhoend, disp=disp,
                           maxiter=maxfun, catol=catol)
    if disp and not sol['success']:
        print("COBYLA failed to find a solution: %s" % (sol.message,))
    return sol['x']

181 

@synchronized
def _minimize_cobyla(fun, x0, args=(), constraints=(),
                     rhobeg=1.0, tol=1e-4, maxiter=1000,
                     disp=False, catol=2e-4, **unknown_options):
    """
    Minimize a scalar function of one or more variables using the
    Constrained Optimization BY Linear Approximation (COBYLA) algorithm.

    Options
    -------
    rhobeg : float
        Reasonable initial changes to the variables.
    tol : float
        Final accuracy in the optimization (not precisely guaranteed).
        This is a lower bound on the size of the trust region.
    disp : bool
        Set to True to print convergence messages. If False,
        `verbosity` is ignored and set to 0.
    maxiter : int
        Maximum number of function evaluations.
    catol : float
        Tolerance (absolute) for constraint violations.

    """
    _check_unknown_options(unknown_options)
    # Map the generic solver option names onto the names used by the
    # Fortran routine.
    maxfun = maxiter
    rhoend = tol
    iprint = int(bool(disp))

    # Accept a single constraint dict as well as a sequence of dicts.
    if isinstance(constraints, dict):
        constraints = (constraints, )

    # Validate each constraint specification.
    for ic, con in enumerate(constraints):
        # check type
        try:
            ctype = con['type'].lower()
        except KeyError as e:
            raise KeyError('Constraint %d has no type defined.' % ic) from e
        except TypeError as e:
            raise TypeError('Constraints must be defined using a '
                            'dictionary.') from e
        except AttributeError as e:
            raise TypeError("Constraint's type must be a string.") from e
        else:
            # COBYLA only supports inequality constraints.
            if ctype != 'ineq':
                raise ValueError("Constraints of type '%s' not handled by "
                                 "COBYLA." % con['type'])

        # check function
        if 'fun' not in con:
            raise KeyError('Constraint %d has no function defined.' % ic)

        # check extra arguments
        if 'args' not in con:
            con['args'] = ()

    # m is the total number of constraint values
    # it takes into account that some constraints may be vector-valued
    cons_lengths = []
    for c in constraints:
        f = c['fun'](x0, *c['args'])
        try:
            cons_length = len(f)
        except TypeError:
            # Scalar-valued constraint.
            cons_length = 1
        cons_lengths.append(cons_length)
    m = sum(cons_lengths)

    def calcfc(x, con):
        # Callback invoked by the Fortran routine: return the objective
        # value and fill `con` in place with all constraint values.
        f = fun(x, *args)
        i = 0
        for size, c in zip(cons_lengths, constraints):
            con[i: i + size] = c['fun'](x, *c['args'])
            i += size
        return f

    # dinfo receives (status, nfev, fun, maxcv) from the Fortran code.
    info = np.zeros(4, np.float64)
    xopt, info = _cobyla.minimize(calcfc, m=m, x=np.copy(x0), rhobeg=rhobeg,
                                  rhoend=rhoend, iprint=iprint, maxfun=maxfun,
                                  dinfo=info)

    if info[3] > catol:
        # Check constraint violation: report failure if the final point
        # violates the constraints by more than `catol`.
        info[0] = 4

    return OptimizeResult(x=xopt,
                          status=int(info[0]),
                          success=info[0] == 1,
                          message={1: 'Optimization terminated successfully.',
                                   2: 'Maximum number of function evaluations '
                                      'has been exceeded.',
                                   3: 'Rounding errors are becoming damaging '
                                      'in COBYLA subroutine.',
                                   4: 'Did not converge to a solution '
                                      'satisfying the constraints. See '
                                      '`maxcv` for magnitude of violation.',
                                   5: 'NaN result encountered.'
                                   }.get(info[0], 'Unknown exit status.'),
                          nfev=int(info[1]),
                          fun=info[2],
                          maxcv=info[3])