Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1""" 

2Notes 

3----- 

4Code written using below textbook as a reference. 

5Results are checked against the expected outcomes in the text book. 

6 

7Properties: 

8Hyndman, Rob J., and George Athanasopoulos. Forecasting: principles and 

9practice. OTexts, 2014. 

10 

11Author: Terence L van Zyl 

12Modified: Kevin Sheppard 

13""" 

14import numpy as np 

15import pandas as pd 

16from scipy.optimize import basinhopping, brute, minimize 

17from scipy.spatial.distance import sqeuclidean 

18from scipy.special import inv_boxcox 

19from scipy.stats import boxcox 

20 

21from statsmodels.base.model import Results 

22from statsmodels.base.wrapper import (populate_wrapper, union_dicts, 

23 ResultsWrapper) 

24from statsmodels.tools.validation import (array_like, bool_like, float_like, 

25 string_like, int_like) 

26from statsmodels.tsa.base.tsa_model import TimeSeriesModel 

27from statsmodels.tsa.tsatools import freq_to_period 

28import statsmodels.tsa._exponential_smoothers as smoothers 

29 

30 

31def _holt_init(x, xi, p, y, l, b): 

32 """Initialization for the Holt Models""" 

33 p[xi.astype(np.bool)] = x 

34 alpha, beta, _, l0, b0, phi = p[:6] 

35 alphac = 1 - alpha 

36 betac = 1 - beta 

37 y_alpha = alpha * y 

38 l[:] = 0 

39 b[:] = 0 

40 l[0] = l0 

41 b[0] = b0 

42 return alpha, beta, phi, alphac, betac, y_alpha 

43 

44 

def _holt__(x, xi, p, y, l, b, s, m, n, max_seen):
    """Sum-of-squares objective for simple exponential smoothing (N, N).

    Runs the SES level recursion
    ``l[t] = alpha * y[t-1] + (1 - alpha) * l[t-1]`` and returns the
    squared Euclidean distance between the fitted levels and ``y``.
    """
    alpha, _beta, _phi, alphac, _betac, y_alpha = _holt_init(x, xi, p, y, l, b)
    for t in range(1, n):
        l[t] = y_alpha[t - 1] + alphac * l[t - 1]
    return sqeuclidean(l, y)

55 

56 

def _holt_mul_dam(x, xi, p, y, l, b, s, m, n, max_seen):
    """Objective for Holt's method with (damped) multiplicative trend.

    Covers the (M, N) and (Md, N) configurations.  Inadmissible parameter
    points (``alpha == 0`` or ``beta > alpha``) are rejected by returning
    ``max_seen`` so the optimizer moves away from them.
    """
    alpha, beta, phi, alphac, betac, y_alpha = _holt_init(x, xi, p, y, l, b)
    if alpha == 0.0 or beta > alpha:
        return max_seen
    for t in range(1, n):
        damped_trend = b[t - 1] ** phi
        l[t] = y_alpha[t - 1] + alphac * (l[t - 1] * damped_trend)
        b[t] = beta * (l[t] / l[t - 1]) + betac * damped_trend
    return sqeuclidean(l * b ** phi, y)

72 

73 

def _holt_add_dam(x, xi, p, y, l, b, s, m, n, max_seen):
    """Objective for Holt's method with (damped) additive trend.

    Covers the (A, N) and (Ad, N) configurations.  Inadmissible parameter
    points (``alpha == 0`` or ``beta > alpha``) are rejected by returning
    ``max_seen`` so the optimizer moves away from them.
    """
    alpha, beta, phi, alphac, betac, y_alpha = _holt_init(x, xi, p, y, l, b)
    if alpha == 0.0 or beta > alpha:
        return max_seen
    for t in range(1, n):
        damped_trend = phi * b[t - 1]
        l[t] = y_alpha[t - 1] + alphac * (l[t - 1] + damped_trend)
        b[t] = beta * (l[t] - l[t - 1]) + betac * damped_trend
    return sqeuclidean(l + phi * b, y)

89 

90 

91def _holt_win_init(x, xi, p, y, l, b, s, m): 

92 """Initialization for the Holt Winters Seasonal Models""" 

93 p[xi.astype(np.bool)] = x 

94 alpha, beta, gamma, l0, b0, phi = p[:6] 

95 s0 = p[6:] 

96 alphac = 1 - alpha 

97 betac = 1 - beta 

98 gammac = 1 - gamma 

99 y_alpha = alpha * y 

100 y_gamma = gamma * y 

101 l[:] = 0 

102 b[:] = 0 

103 s[:] = 0 

104 l[0] = l0 

105 b[0] = b0 

106 s[:m] = s0 

107 return alpha, beta, gamma, phi, alphac, betac, gammac, y_alpha, y_gamma 

108 

109 

def _holt_win__mul(x, xi, p, y, l, b, s, m, n, max_seen):
    """Objective for seasonal-only smoothing, multiplicative season (N, M).

    Rejects inadmissible parameter points (``alpha == 0`` or
    ``gamma > 1 - alpha``) by returning ``max_seen``.
    """
    (alpha, beta, gamma, phi, alphac, betac, gammac,
     y_alpha, y_gamma) = _holt_win_init(x, xi, p, y, l, b, s, m)
    if alpha == 0.0 or gamma > 1 - alpha:
        return max_seen
    for t in range(1, n):
        l[t] = y_alpha[t - 1] / s[t - 1] + alphac * l[t - 1]
        s[t + m - 1] = y_gamma[t - 1] / l[t - 1] + gammac * s[t - 1]
    return sqeuclidean(l * s[:-(m - 1)], y)

126 

127 

def _holt_win__add(x, xi, p, y, l, b, s, m, n, max_seen):
    """Objective for seasonal-only smoothing, additive season (N, A).

    Rejects inadmissible parameter points (``alpha == 0`` or
    ``gamma > 1 - alpha``) by returning ``max_seen``.
    """
    (alpha, beta, gamma, phi, alphac, betac, gammac,
     y_alpha, y_gamma) = _holt_win_init(x, xi, p, y, l, b, s, m)
    if alpha == 0.0 or gamma > 1 - alpha:
        return max_seen
    for t in range(1, n):
        l[t] = y_alpha[t - 1] - alpha * s[t - 1] + alphac * l[t - 1]
        s[t + m - 1] = y_gamma[t - 1] - gamma * l[t - 1] + gammac * s[t - 1]
    return sqeuclidean(l + s[:-(m - 1)], y)

144 

145 

def _holt_win_add_mul_dam(x, xi, p, y, l, b, s, m, n, max_seen):
    """Objective for (damped) additive trend with multiplicative season.

    Covers the (A, M) and (Ad, M) configurations.  Rejects inadmissible
    parameter points (``alpha * beta == 0``, ``beta > alpha`` or
    ``gamma > 1 - alpha``) by returning ``max_seen``.
    """
    (alpha, beta, gamma, phi, alphac, betac, gammac,
     y_alpha, y_gamma) = _holt_win_init(x, xi, p, y, l, b, s, m)
    if alpha * beta == 0.0 or beta > alpha or gamma > 1 - alpha:
        return max_seen
    for t in range(1, n):
        damped_trend = phi * b[t - 1]
        l[t] = y_alpha[t - 1] / s[t - 1] + alphac * (l[t - 1] + damped_trend)
        b[t] = beta * (l[t] - l[t - 1]) + betac * damped_trend
        s[t + m - 1] = (y_gamma[t - 1] / (l[t - 1] + damped_trend)
                        + gammac * s[t - 1])
    return sqeuclidean((l + phi * b) * s[:-(m - 1)], y)

165 

166 

def _holt_win_mul_mul_dam(x, xi, p, y, l, b, s, m, n, max_seen):
    """Objective for (damped) multiplicative trend with multiplicative season.

    Covers the (M, M) and (Md, M) configurations.  Rejects inadmissible
    parameter points (``alpha * beta == 0``, ``beta > alpha`` or
    ``gamma > 1 - alpha``) by returning ``max_seen``.
    """
    (alpha, beta, gamma, phi, alphac, betac, gammac,
     y_alpha, y_gamma) = _holt_win_init(x, xi, p, y, l, b, s, m)
    if alpha * beta == 0.0 or beta > alpha or gamma > 1 - alpha:
        return max_seen
    for t in range(1, n):
        damped_trend = b[t - 1] ** phi
        l[t] = y_alpha[t - 1] / s[t - 1] + alphac * (l[t - 1] * damped_trend)
        b[t] = beta * (l[t] / l[t - 1]) + betac * damped_trend
        s[t + m - 1] = (y_gamma[t - 1] / (l[t - 1] * damped_trend)
                        + gammac * s[t - 1])
    return sqeuclidean((l * b ** phi) * s[:-(m - 1)], y)

186 

187 

def _holt_win_add_add_dam(x, xi, p, y, l, b, s, m, n, max_seen):
    """Objective for (damped) additive trend with additive season.

    Covers the (A, A) and (Ad, A) configurations.  Rejects inadmissible
    parameter points (``alpha * beta == 0``, ``beta > alpha`` or
    ``gamma > 1 - alpha``) by returning ``max_seen``.
    """
    (alpha, beta, gamma, phi, alphac, betac, gammac,
     y_alpha, y_gamma) = _holt_win_init(x, xi, p, y, l, b, s, m)
    if alpha * beta == 0.0 or beta > alpha or gamma > 1 - alpha:
        return max_seen
    for t in range(1, n):
        damped_trend = phi * b[t - 1]
        l[t] = (y_alpha[t - 1] - alpha * s[t - 1]
                + alphac * (l[t - 1] + damped_trend))
        b[t] = beta * (l[t] - l[t - 1]) + betac * damped_trend
        s[t + m - 1] = (y_gamma[t - 1] - gamma * (l[t - 1] + damped_trend)
                        + gammac * s[t - 1])
    return sqeuclidean((l + phi * b) + s[:-(m - 1)], y)

206 

207 

def _holt_win_mul_add_dam(x, xi, p, y, l, b, s, m, n, max_seen):
    """
    Multiplicative and Multiplicative Damped with Additive Seasonal
    Minimization Function
    (M,A) & (Md,A)

    Rejects inadmissible parameter points (``alpha * beta == 0``,
    ``beta > alpha`` or ``gamma > 1 - alpha``) by returning ``max_seen``.
    """
    alpha, beta, gamma, phi, alphac, betac, gammac, y_alpha, y_gamma = _holt_win_init(
        x, xi, p, y, l, b, s, m)
    if alpha * beta == 0.0:
        return max_seen
    if beta > alpha or gamma > 1 - alpha:
        return max_seen
    for i in range(1, n):
        l[i] = (y_alpha[i - 1]) - (alpha * s[i - 1]) + \
            (alphac * (l[i - 1] * b[i - 1]**phi))
        b[i] = (beta * (l[i] / l[i - 1])) + (betac * b[i - 1]**phi)
        s[i + m - 1] = y_gamma[i - 1] - \
            (gamma * (l[i - 1] * b[i - 1]**phi)) + (gammac * s[i - 1])
    # BUG FIX: the fitted series for a (damped) multiplicative trend is
    # l * b**phi (consistent with the recursion above and with
    # _holt_mul_dam / _holt_win_mul_mul_dam); the previous `l * phi * b`
    # scaled the trend linearly by phi instead of damping it.
    return sqeuclidean((l * b**phi) + s[:-(m - 1)], y)

227 

228 

# Map from (seasonal, trend) component pairs to the Cython-accelerated
# objective functions in statsmodels.tsa._exponential_smoothers.  Each
# component is 'add', 'mul', or None (absent); damping is handled inside
# the functions through the phi parameter, so damped and undamped variants
# share an entry.
SMOOTHERS = {('mul', 'add'): smoothers._holt_win_add_mul_dam,
             ('mul', 'mul'): smoothers._holt_win_mul_mul_dam,
             ('mul', None): smoothers._holt_win__mul,
             ('add', 'add'): smoothers._holt_win_add_add_dam,
             ('add', 'mul'): smoothers._holt_win_mul_add_dam,
             ('add', None): smoothers._holt_win__add,
             (None, 'add'): smoothers._holt_add_dam,
             (None, 'mul'): smoothers._holt_mul_dam,
             (None, None): smoothers._holt__}

238 

# Pure-Python counterparts of SMOOTHERS, keyed the same way by
# (seasonal, trend).  Kept for testing/reference against the Cython
# implementations above.
PY_SMOOTHERS = {('mul', 'add'): _holt_win_add_mul_dam,
                ('mul', 'mul'): _holt_win_mul_mul_dam,
                ('mul', None): _holt_win__mul,
                ('add', 'add'): _holt_win_add_add_dam,
                ('add', 'mul'): _holt_win_mul_add_dam,
                ('add', None): _holt_win__add,
                (None, 'add'): _holt_add_dam,
                (None, 'mul'): _holt_mul_dam,
                (None, None): _holt__}

248 

249 

class HoltWintersResults(Results):
    """
    Holt Winter's Exponential Smoothing Results

    Parameters
    ----------
    model : ExponentialSmoothing instance
        The fitted model instance
    params : dict
        All the parameters for the Exponential Smoothing model.

    Attributes
    ----------
    params: dict
        All the parameters for the Exponential Smoothing model.
    params_formatted: pd.DataFrame
        DataFrame containing all parameters, their short names and a flag
        indicating whether the parameter's value was optimized to fit the data.
    fittedfcast: ndarray
        An array of both the fitted values and forecast values.
    fittedvalues: ndarray
        An array of the fitted values. Fitted by the Exponential Smoothing
        model.
    fcastvalues: ndarray
        An array of the forecast values forecast by the Exponential Smoothing
        model.
    sse: float
        The sum of squared errors
    level: ndarray
        An array of the levels values that make up the fitted values.
    slope: ndarray
        An array of the slope values that make up the fitted values.
    season: ndarray
        An array of the seasonal values that make up the fitted values.
    aic: float
        The Akaike information criterion.
    bic: float
        The Bayesian information criterion.
    aicc: float
        AIC with a correction for finite sample sizes.
    resid: ndarray
        An array of the residuals of the fittedvalues and actual values.
    k: int
        the k parameter used to remove the bias in AIC, BIC etc.
    optimized: bool
        Flag indicating whether the model parameters were optimized to fit
        the data.
    mle_retvals: {None, scipy.optimize.optimize.OptimizeResult}
        Optimization results if the parameters were optimized to fit the data.
    """

    def __init__(self, model, params, **kwargs):
        # Expose the model's data container on the results object before the
        # base class wires up the remaining attributes.
        self.data = model.data
        super(HoltWintersResults, self).__init__(model, params, **kwargs)

    def predict(self, start=None, end=None):
        """
        In-sample prediction and out-of-sample forecasting

        Parameters
        ----------
        start : int, str, or datetime, optional
            Zero-indexed observation number at which to start forecasting, ie.,
            the first forecast is start. Can also be a date string to
            parse or a datetime type. Default is the the zeroth observation.
        end : int, str, or datetime, optional
            Zero-indexed observation number at which to end forecasting, ie.,
            the first forecast is start. Can also be a date string to
            parse or a datetime type. However, if the dates index does not
            have a fixed frequency, end must be an integer index if you
            want out of sample prediction. Default is the last observation in
            the sample.

        Returns
        -------
        forecast : ndarray
            Array of out of sample forecasts.
        """
        # Delegates entirely to the model; self.params holds the fitted
        # parameter dict produced by ExponentialSmoothing.fit.
        return self.model.predict(self.params, start, end)

    def forecast(self, steps=1):
        """
        Out-of-sample forecasts

        Parameters
        ----------
        steps : int
            The number of out of sample forecasts from the end of the
            sample.

        Returns
        -------
        forecast : ndarray
            Array of out of sample forecasts
        """
        try:
            # Build date-based start/end from the index frequency so the
            # result keeps a proper forecast index when one is available.
            freq = getattr(self.model._index, 'freq', 1)
            start = self.model._index[-1] + freq
            end = self.model._index[-1] + steps * freq
            return self.model.predict(self.params, start=start, end=end)
        except (AttributeError, ValueError):
            # May occur when the index does not have a freq
            return self.model._predict(h=steps, **self.params).fcastvalues

    def summary(self):
        """
        Summarize the fitted Model

        Returns
        -------
        smry : Summary instance
            This holds the summary table and text, which can be printed or
            converted to various output formats.

        See Also
        --------
        statsmodels.iolib.summary.Summary
        """
        from statsmodels.iolib.summary import Summary
        from statsmodels.iolib.table import SimpleTable
        model = self.model
        title = model.__class__.__name__ + ' Model Results'

        # Prefer the pandas column/series name for the dependent variable.
        dep_variable = 'endog'
        if isinstance(self.model.endog, pd.DataFrame):
            dep_variable = self.model.endog.columns[0]
        elif isinstance(self.model.endog, pd.Series):
            dep_variable = self.model.endog.name
        seasonal_periods = None if self.model.seasonal is None else self.model.seasonal_periods
        lookup = {'add': 'Additive', 'additive': 'Additive',
                  'mul': 'Multiplicative', 'multiplicative': 'Multiplicative', None: 'None'}
        # use_boxcox may be a bool, 'log', or a float lambda; show the
        # effective coefficient alongside the on/off flag.
        transform = self.params['use_boxcox']
        box_cox_transform = True if transform else False
        box_cox_coeff = transform if isinstance(transform, str) else self.params['lamda']
        if isinstance(box_cox_coeff, float):
            box_cox_coeff = '{:>10.5f}'.format(box_cox_coeff)
        top_left = [('Dep. Variable:', [dep_variable]),
                    ('Model:', [model.__class__.__name__]),
                    ('Optimized:', [str(np.any(self.optimized))]),
                    ('Trend:', [lookup[self.model.trend]]),
                    ('Seasonal:', [lookup[self.model.seasonal]]),
                    ('Seasonal Periods:', [str(seasonal_periods)]),
                    ('Box-Cox:', [str(box_cox_transform)]),
                    ('Box-Cox Coeff.:', [str(box_cox_coeff)])]

        top_right = [
            ('No. Observations:', [str(len(self.model.endog))]),
            ('SSE', ['{:5.3f}'.format(self.sse)]),
            ('AIC', ['{:5.3f}'.format(self.aic)]),
            ('BIC', ['{:5.3f}'.format(self.bic)]),
            ('AICC', ['{:5.3f}'.format(self.aicc)]),
            ('Date:', None),
            ('Time:', None)]

        smry = Summary()
        smry.add_table_2cols(self, gleft=top_left, gright=top_right,
                             title=title)
        formatted = self.params_formatted  # type: pd.DataFrame

        def _fmt(x):
            # Fixed-width numeric formatting: fall back to general format
            # for very large/small magnitudes, otherwise scale the number
            # of decimals to the magnitude.
            abs_x = np.abs(x)
            scale = 1
            if abs_x != 0:
                scale = int(np.log10(abs_x))
            if scale > 4 or scale < -3:
                return '{:>20.5g}'.format(x)
            dec = min(7 - scale, 7)
            fmt = '{{:>20.{0}f}}'.format(dec)
            return fmt.format(x)

        # One row per parameter: formatted value, short code, optimized flag.
        tab = []
        for _, vals in formatted.iterrows():
            tab.append([_fmt(vals.iloc[1]),
                        '{0:>20}'.format(vals.iloc[0]),
                        '{0:>20}'.format(str(bool(vals.iloc[2])))])
        params_table = SimpleTable(tab, headers=['coeff', 'code', 'optimized'],
                                   title="",
                                   stubs=list(formatted.index))

        smry.tables.append(params_table)

        return smry

432 

433 

class HoltWintersResultsWrapper(ResultsWrapper):
    # Results attributes that should be wrapped by the wrapper machinery,
    # e.g. converted back to pandas objects carrying the model's row index.
    _attrs = {'fittedvalues': 'rows',
              'level': 'rows',
              'resid': 'rows',
              'season': 'rows',
              'slope': 'rows'}
    _wrap_attrs = union_dicts(ResultsWrapper._wrap_attrs, _attrs)
    # Methods whose return values should carry a date index.
    _methods = {'predict': 'dates',
                'forecast': 'dates'}
    _wrap_methods = union_dicts(ResultsWrapper._wrap_methods, _methods)

444 

445 

# Register the wrapper so HoltWintersResults instances are automatically
# wrapped (pandas metadata re-attached) when returned to users.
populate_wrapper(HoltWintersResultsWrapper, HoltWintersResults)

447 

448 

449class ExponentialSmoothing(TimeSeriesModel): 

450 """ 

451 Holt Winter's Exponential Smoothing 

452 

453 Parameters 

454 ---------- 

455 endog : array_like 

456 Time series 

457 trend : {"add", "mul", "additive", "multiplicative", None}, optional 

458 Type of trend component. 

459 damped : bool, optional 

460 Should the trend component be damped. 

461 seasonal : {"add", "mul", "additive", "multiplicative", None}, optional 

462 Type of seasonal component. 

463 seasonal_periods : int, optional 

464 The number of periods in a complete seasonal cycle, e.g., 4 for 

465 quarterly data or 7 for daily data with a weekly cycle. 

466 

467 Returns 

468 ------- 

469 results : ExponentialSmoothing class 

470 

471 Notes 

472 ----- 

473 This is a full implementation of the holt winters exponential smoothing as 

474 per [1]_. This includes all the unstable methods as well as the stable 

475 methods. The implementation of the library covers the functionality of the 

476 R library as much as possible whilst still being Pythonic. 

477 

478 References 

479 ---------- 

480 .. [1] Hyndman, Rob J., and George Athanasopoulos. Forecasting: principles 

481 and practice. OTexts, 2014. 

482 """ 

483 

484 def __init__(self, endog, trend=None, damped=False, seasonal=None, 

485 seasonal_periods=None, dates=None, freq=None, missing='none'): 

486 super(ExponentialSmoothing, self).__init__(endog, None, dates, 

487 freq, missing=missing) 

488 self.endog = self.endog 

489 self._y = self._data = array_like(endog, 'endog', contiguous=True, 

490 order='C') 

491 options = ("add", "mul", "additive", "multiplicative") 

492 trend = string_like(trend, 'trend', options=options, optional=True) 

493 if trend in ['additive', 'multiplicative']: 

494 trend = {'additive': 'add', 'multiplicative': 'mul'}[trend] 

495 self.trend = trend 

496 self.damped = bool_like(damped, 'damped') 

497 seasonal = string_like(seasonal, 'seasonal', options=options, 

498 optional=True) 

499 if seasonal in ['additive', 'multiplicative']: 

500 seasonal = {'additive': 'add', 'multiplicative': 'mul'}[seasonal] 

501 self.seasonal = seasonal 

502 self.trending = trend in ['mul', 'add'] 

503 self.seasoning = seasonal in ['mul', 'add'] 

504 if (self.trend == 'mul' or self.seasonal == 'mul') and \ 

505 not np.all(self._data > 0.0): 

506 raise ValueError('endog must be strictly positive when using' 

507 'multiplicative trend or seasonal components.') 

508 if self.damped and not self.trending: 

509 raise ValueError('Can only dampen the trend component') 

510 if self.seasoning: 

511 self.seasonal_periods = int_like(seasonal_periods, 

512 'seasonal_periods', optional=True) 

513 if seasonal_periods is None: 

514 self.seasonal_periods = freq_to_period(self._index_freq) 

515 if self.seasonal_periods <= 1: 

516 raise ValueError('seasonal_periods must be larger than 1.') 

517 else: 

518 self.seasonal_periods = 0 

519 self.nobs = len(self.endog) 

520 

521 def predict(self, params, start=None, end=None): 

522 """ 

523 Returns in-sample and out-of-sample prediction. 

524 

525 Parameters 

526 ---------- 

527 params : ndarray 

528 The fitted model parameters. 

529 start : int, str, or datetime 

530 Zero-indexed observation number at which to start forecasting, ie., 

531 the first forecast is start. Can also be a date string to 

532 parse or a datetime type. 

533 end : int, str, or datetime 

534 Zero-indexed observation number at which to end forecasting, ie., 

535 the first forecast is start. Can also be a date string to 

536 parse or a datetime type. 

537 

538 Returns 

539 ------- 

540 predicted values : ndarray 

541 """ 

542 if start is None: 

543 freq = getattr(self._index, 'freq', 1) 

544 start = self._index[-1] + freq 

545 start, end, out_of_sample, prediction_index = self._get_prediction_index( 

546 start=start, end=end) 

547 if out_of_sample > 0: 

548 res = self._predict(h=out_of_sample, **params) 

549 else: 

550 res = self._predict(h=0, **params) 

551 return res.fittedfcast[start:end + out_of_sample + 1] 

552 

553 def fit(self, smoothing_level=None, smoothing_slope=None, smoothing_seasonal=None, 

554 damping_slope=None, optimized=True, use_boxcox=False, remove_bias=False, 

555 use_basinhopping=False, start_params=None, initial_level=None, initial_slope=None, 

556 use_brute=True): 

557 """ 

558 Fit the model 

559 

560 Parameters 

561 ---------- 

562 smoothing_level : float, optional 

563 The alpha value of the simple exponential smoothing, if the value 

564 is set then this value will be used as the value. 

565 smoothing_slope : float, optional 

566 The beta value of the Holt's trend method, if the value is 

567 set then this value will be used as the value. 

568 smoothing_seasonal : float, optional 

569 The gamma value of the holt winters seasonal method, if the value 

570 is set then this value will be used as the value. 

571 damping_slope : float, optional 

572 The phi value of the damped method, if the value is 

573 set then this value will be used as the value. 

574 optimized : bool, optional 

575 Estimate model parameters by maximizing the log-likelihood 

576 use_boxcox : {True, False, 'log', float}, optional 

577 Should the Box-Cox transform be applied to the data first? If 'log' 

578 then apply the log. If float then use lambda equal to float. 

579 remove_bias : bool, optional 

580 Remove bias from forecast values and fitted values by enforcing 

581 that the average residual is equal to zero. 

582 use_basinhopping : bool, optional 

583 Using Basin Hopping optimizer to find optimal values 

584 start_params : ndarray, optional 

585 Starting values to used when optimizing the fit. If not provided, 

586 starting values are determined using a combination of grid search 

587 and reasonable values based on the initial values of the data 

588 initial_level : float, optional 

589 Value to use when initializing the fitted level. 

590 initial_slope : float, optional 

591 Value to use when initializing the fitted slope. 

592 use_brute : bool, optional 

593 Search for good starting values using a brute force (grid) 

594 optimizer. If False, a naive set of starting values is used. 

595 

596 Returns 

597 ------- 

598 results : HoltWintersResults class 

599 See statsmodels.tsa.holtwinters.HoltWintersResults 

600 

601 Notes 

602 ----- 

603 This is a full implementation of the holt winters exponential smoothing 

604 as per [1]. This includes all the unstable methods as well as the 

605 stable methods. The implementation of the library covers the 

606 functionality of the R library as much as possible whilst still 

607 being Pythonic. 

608 

609 References 

610 ---------- 

611 [1] Hyndman, Rob J., and George Athanasopoulos. Forecasting: principles 

612 and practice. OTexts, 2014. 

613 """ 

614 # Variable renames to alpha,beta, etc as this helps with following the 

615 # mathematical notation in general 

616 alpha = float_like(smoothing_level, 'smoothing_level', True) 

617 beta = float_like(smoothing_slope, 'smoothing_slope', True) 

618 gamma = float_like(smoothing_seasonal, 'smoothing_seasonal', True) 

619 phi = float_like(damping_slope, 'damping_slope', True) 

620 l0 = self._l0 = float_like(initial_level, 'initial_level', True) 

621 b0 = self._b0 = float_like(initial_slope, 'initial_slope', True) 

622 if start_params is not None: 

623 start_params = array_like(start_params, 'start_params', 

624 contiguous=True) 

625 data = self._data 

626 damped = self.damped 

627 seasoning = self.seasoning 

628 trending = self.trending 

629 trend = self.trend 

630 seasonal = self.seasonal 

631 m = self.seasonal_periods 

632 opt = None 

633 phi = phi if damped else 1.0 

634 if use_boxcox == 'log': 

635 lamda = 0.0 

636 y = boxcox(data, lamda) 

637 elif isinstance(use_boxcox, float): 

638 lamda = use_boxcox 

639 y = boxcox(data, lamda) 

640 elif use_boxcox: 

641 y, lamda = boxcox(data) 

642 else: 

643 lamda = None 

644 y = data.squeeze() 

645 self._y = y 

646 lvls = np.zeros(self.nobs) 

647 b = np.zeros(self.nobs) 

648 s = np.zeros(self.nobs + m - 1) 

649 p = np.zeros(6 + m) 

650 max_seen = np.finfo(np.double).max 

651 l0, b0, s0 = self.initial_values() 

652 

653 xi = np.zeros_like(p, dtype=np.bool) 

654 if optimized: 

655 init_alpha = alpha if alpha is not None else 0.5 / max(m, 1) 

656 init_beta = beta if beta is not None else 0.1 * init_alpha if trending else beta 

657 init_gamma = None 

658 init_phi = phi if phi is not None else 0.99 

659 # Selection of functions to optimize for appropriate parameters 

660 if seasoning: 

661 init_gamma = gamma if gamma is not None else 0.05 * \ 

662 (1 - init_alpha) 

663 xi = np.array([alpha is None, trending and beta is None, gamma is None, 

664 initial_level is None, trending and initial_slope is None, 

665 phi is None and damped] + [True] * m) 

666 func = SMOOTHERS[(seasonal, trend)] 

667 elif trending: 

668 xi = np.array([alpha is None, beta is None, False, 

669 initial_level is None, initial_slope is None, 

670 phi is None and damped] + [False] * m) 

671 func = SMOOTHERS[(None, trend)] 

672 else: 

673 xi = np.array([alpha is None, False, False, 

674 initial_level is None, False, False] + [False] * m) 

675 func = SMOOTHERS[(None, None)] 

676 p[:] = [init_alpha, init_beta, init_gamma, l0, b0, init_phi] + s0 

677 if np.any(xi): 

678 # txi [alpha, beta, gamma, l0, b0, phi, s0,..,s_(m-1)] 

679 # Have a quick look in the region for a good starting place for alpha etc. 

680 # using guesstimates for the levels 

681 txi = xi & np.array([True, True, True, False, False, True] + [False] * m) 

682 txi = txi.astype(np.bool) 

683 bounds = ([(0.0, 1.0), (0.0, 1.0), (0.0, 1.0), (0.0, None), 

684 (0.0, None), (0.0, 1.0)] + [(None, None), ] * m) 

685 args = (txi.astype(np.uint8), p, y, lvls, b, s, m, self.nobs, 

686 max_seen) 

687 if start_params is None and np.any(txi) and use_brute: 

688 _bounds = [bnd for bnd, flag in zip(bounds, txi) if flag] 

689 res = brute(func, _bounds, args, Ns=20, 

690 full_output=True, finish=None) 

691 p[txi], max_seen, _, _ = res 

692 else: 

693 if start_params is not None: 

694 if len(start_params) != xi.sum(): 

695 msg = 'start_params must have {0} values but ' \ 

696 'has {1} instead' 

697 nxi, nsp = len(xi), len(start_params) 

698 raise ValueError(msg.format(nxi, nsp)) 

699 p[xi] = start_params 

700 args = (xi.astype(np.uint8), p, y, lvls, b, s, m, 

701 self.nobs, max_seen) 

702 max_seen = func(np.ascontiguousarray(p[xi]), *args) 

703 # alpha, beta, gamma, l0, b0, phi = p[:6] 

704 # s0 = p[6:] 

705 # bounds = np.array([(0.0,1.0),(0.0,1.0),(0.0,1.0),(0.0,None), 

706 # (0.0,None),(0.8,1.0)] + [(None,None),]*m) 

707 args = (xi.astype(np.uint8), p, y, lvls, b, s, m, self.nobs, max_seen) 

708 if use_basinhopping: 

709 # Take a deeper look in the local minimum we are in to find the best 

710 # solution to parameters, maybe hop around to try escape the local 

711 # minimum we may be in. 

712 _bounds = [bnd for bnd, flag in zip(bounds, xi) if flag] 

713 res = basinhopping(func, p[xi], 

714 minimizer_kwargs={'args': args, 'bounds': _bounds}, 

715 stepsize=0.01) 

716 success = res.lowest_optimization_result.success 

717 else: 

718 # Take a deeper look in the local minimum we are in to find the best 

719 # solution to parameters 

720 _bounds = [bnd for bnd, flag in zip(bounds, xi) if flag] 

721 lb, ub = np.asarray(_bounds).T.astype(np.float) 

722 initial_p = p[xi] 

723 

724 # Ensure strictly inbounds 

725 loc = initial_p <= lb 

726 upper = ub[loc].copy() 

727 upper[~np.isfinite(upper)] = 100.0 

728 eps = 1e-4 

729 initial_p[loc] = lb[loc] + eps * (upper - lb[loc]) 

730 

731 loc = initial_p >= ub 

732 lower = lb[loc].copy() 

733 lower[~np.isfinite(lower)] = -100.0 

734 eps = 1e-4 

735 initial_p[loc] = ub[loc] - eps * (ub[loc] - lower) 

736 

737 res = minimize(func, initial_p, args=args, bounds=_bounds) 

738 success = res.success 

739 

740 if not success: 

741 from warnings import warn 

742 from statsmodels.tools.sm_exceptions import ConvergenceWarning 

743 warn("Optimization failed to converge. Check mle_retvals.", 

744 ConvergenceWarning) 

745 p[xi] = res.x 

746 opt = res 

747 else: 

748 from warnings import warn 

749 from statsmodels.tools.sm_exceptions import EstimationWarning 

750 message = "Model has no free parameters to estimate. Set " \ 

751 "optimized=False to suppress this warning" 

752 warn(message, EstimationWarning) 

753 

754 [alpha, beta, gamma, l0, b0, phi] = p[:6] 

755 s0 = p[6:] 

756 

757 hwfit = self._predict(h=0, smoothing_level=alpha, smoothing_slope=beta, 

758 smoothing_seasonal=gamma, damping_slope=phi, 

759 initial_level=l0, initial_slope=b0, initial_seasons=s0, 

760 use_boxcox=use_boxcox, remove_bias=remove_bias, is_optimized=xi) 

761 hwfit._results.mle_retvals = opt 

762 return hwfit 

763 

764 def initial_values(self): 

765 """ 

766 Compute initial values used in the exponential smoothing recursions 

767 

768 Returns 

769 ------- 

770 initial_level : float 

771 The initial value used for the level component 

772 initial_slope : {float, None} 

773 The initial value used for the trend component 

774 initial_seasons : list 

775 The initial values used for the seasonal components 

776 

777 Notes 

778 ----- 

779 Convenience function the exposes the values used to initialize the 

780 recursions. When optimizing parameters these are used as starting 

781 values. 

782 

783 Method used to compute the initial value depends on when components 

784 are included in the model. In a simple exponential smoothing model 

785 without trend or a seasonal components, the initial value is set to the 

786 first observation. When a trend is added, the trend is initialized 

787 either using y[1]/y[0], if multiplicative, or y[1]-y[0]. When the 

788 seasonal component is added the initialization adapts to account for 

789 the modified structure. 

790 """ 

791 y = self._y 

792 trend = self.trend 

793 seasonal = self.seasonal 

794 seasoning = self.seasoning 

795 trending = self.trending 

796 m = self.seasonal_periods 

797 l0 = self._l0 

798 b0 = self._b0 

799 if seasoning: 

800 l0 = y[np.arange(self.nobs) % m == 0].mean() if l0 is None else l0 

801 if b0 is None and trending: 

802 lead, lag = y[m:m + m], y[:m] 

803 if trend == 'mul': 

804 b0 = np.exp((np.log(lead.mean()) - np.log(lag.mean())) / m) 

805 else: 

806 b0 = ((lead - lag) / m).mean() 

807 s0 = list(y[:m] / l0) if seasonal == 'mul' else list(y[:m] - l0) 

808 elif trending: 

809 l0 = y[0] if l0 is None else l0 

810 if b0 is None: 

811 b0 = y[1] / y[0] if trend == 'mul' else y[1] - y[0] 

812 s0 = [] 

813 else: 

814 if l0 is None: 

815 l0 = y[0] 

816 b0 = None 

817 s0 = [] 

818 

819 return l0, b0, s0 

820 

821 def _predict(self, h=None, smoothing_level=None, smoothing_slope=None, 

822 smoothing_seasonal=None, initial_level=None, initial_slope=None, 

823 damping_slope=None, initial_seasons=None, use_boxcox=None, lamda=None, 

824 remove_bias=None, is_optimized=None): 

825 """ 

826 Helper prediction function 

827 

828 Parameters 

829 ---------- 

830 h : int, optional 

831 The number of time steps to forecast ahead. 

832 """ 

833 # Variable renames to alpha, beta, etc as this helps with following the 

834 # mathematical notation in general 

835 alpha = smoothing_level 

836 beta = smoothing_slope 

837 gamma = smoothing_seasonal 

838 phi = damping_slope 

839 

840 # Start in sample and out of sample predictions 

841 data = self.endog 

842 damped = self.damped 

843 seasoning = self.seasoning 

844 trending = self.trending 

845 trend = self.trend 

846 seasonal = self.seasonal 

847 m = self.seasonal_periods 

848 phi = phi if damped else 1.0 

849 if use_boxcox == 'log': 

850 lamda = 0.0 

851 y = boxcox(data, 0.0) 

852 elif isinstance(use_boxcox, float): 

853 lamda = use_boxcox 

854 y = boxcox(data, lamda) 

855 elif use_boxcox: 

856 y, lamda = boxcox(data) 

857 else: 

858 lamda = None 

859 y = data.squeeze() 

860 if np.ndim(y) != 1: 

861 raise NotImplementedError('Only 1 dimensional data supported') 

862 y_alpha = np.zeros((self.nobs,)) 

863 y_gamma = np.zeros((self.nobs,)) 

864 alphac = 1 - alpha 

865 y_alpha[:] = alpha * y 

866 if trending: 

867 betac = 1 - beta 

868 if seasoning: 

869 gammac = 1 - gamma 

870 y_gamma[:] = gamma * y 

871 lvls = np.zeros((self.nobs + h + 1,)) 

872 b = np.zeros((self.nobs + h + 1,)) 

873 s = np.zeros((self.nobs + h + m + 1,)) 

874 lvls[0] = initial_level 

875 b[0] = initial_slope 

876 s[:m] = initial_seasons 

877 phi_h = np.cumsum(np.repeat(phi, h + 1)**np.arange(1, h + 1 + 1) 

878 ) if damped else np.arange(1, h + 1 + 1) 

879 trended = {'mul': np.multiply, 

880 'add': np.add, 

881 None: lambda l, b: l 

882 }[trend] 

883 detrend = {'mul': np.divide, 

884 'add': np.subtract, 

885 None: lambda l, b: 0 

886 }[trend] 

887 dampen = {'mul': np.power, 

888 'add': np.multiply, 

889 None: lambda b, phi: 0 

890 }[trend] 

891 nobs = self.nobs 

892 if seasonal == 'mul': 

893 for i in range(1, nobs + 1): 

894 lvls[i] = y_alpha[i - 1] / s[i - 1] + \ 

895 (alphac * trended(lvls[i - 1], dampen(b[i - 1], phi))) 

896 if trending: 

897 b[i] = (beta * detrend(lvls[i], lvls[i - 1])) + \ 

898 (betac * dampen(b[i - 1], phi)) 

899 s[i + m - 1] = y_gamma[i - 1] / trended(lvls[i - 1], dampen(b[i - 1], phi)) + \ 

900 (gammac * s[i - 1]) 

901 slope = b[1:nobs + 1].copy() 

902 season = s[m:nobs + m].copy() 

903 lvls[nobs:] = lvls[nobs] 

904 if trending: 

905 b[:nobs] = dampen(b[:nobs], phi) 

906 b[nobs:] = dampen(b[nobs], phi_h) 

907 trend = trended(lvls, b) 

908 s[nobs + m - 1:] = [s[(nobs - 1) + j % m] for j in range(h + 1 + 1)] 

909 fitted = trend * s[:-m] 

910 elif seasonal == 'add': 

911 for i in range(1, nobs + 1): 

912 lvls[i] = y_alpha[i - 1] - (alpha * s[i - 1]) + \ 

913 (alphac * trended(lvls[i - 1], dampen(b[i - 1], phi))) 

914 if trending: 

915 b[i] = (beta * detrend(lvls[i], lvls[i - 1])) + \ 

916 (betac * dampen(b[i - 1], phi)) 

917 s[i + m - 1] = y_gamma[i - 1] - \ 

918 (gamma * trended(lvls[i - 1], dampen(b[i - 1], phi))) + \ 

919 (gammac * s[i - 1]) 

920 slope = b[1:nobs + 1].copy() 

921 season = s[m:nobs + m].copy() 

922 lvls[nobs:] = lvls[nobs] 

923 if trending: 

924 b[:nobs] = dampen(b[:nobs], phi) 

925 b[nobs:] = dampen(b[nobs], phi_h) 

926 trend = trended(lvls, b) 

927 s[nobs + m - 1:] = [s[(nobs - 1) + j % m] for j in range(h + 1 + 1)] 

928 fitted = trend + s[:-m] 

929 else: 

930 for i in range(1, nobs + 1): 

931 lvls[i] = y_alpha[i - 1] + \ 

932 (alphac * trended(lvls[i - 1], dampen(b[i - 1], phi))) 

933 if trending: 

934 b[i] = (beta * detrend(lvls[i], lvls[i - 1])) + \ 

935 (betac * dampen(b[i - 1], phi)) 

936 slope = b[1:nobs + 1].copy() 

937 season = s[m:nobs + m].copy() 

938 lvls[nobs:] = lvls[nobs] 

939 if trending: 

940 b[:nobs] = dampen(b[:nobs], phi) 

941 b[nobs:] = dampen(b[nobs], phi_h) 

942 trend = trended(lvls, b) 

943 fitted = trend 

944 level = lvls[1:nobs + 1].copy() 

945 if use_boxcox or use_boxcox == 'log' or isinstance(use_boxcox, float): 

946 fitted = inv_boxcox(fitted, lamda) 

947 level = inv_boxcox(level, lamda) 

948 slope = detrend(trend[:nobs], level) 

949 if seasonal == 'add': 

950 season = (fitted - inv_boxcox(trend, lamda))[:nobs] 

951 else: # seasonal == 'mul': 

952 season = (fitted / inv_boxcox(trend, lamda))[:nobs] 

953 sse = sqeuclidean(fitted[:-h - 1], data) 

954 # (s0 + gamma) + (b0 + beta) + (l0 + alpha) + phi 

955 k = m * seasoning + 2 * trending + 2 + 1 * damped 

956 aic = self.nobs * np.log(sse / self.nobs) + k * 2 

957 if self.nobs - k - 3 > 0: 

958 aicc_penalty = (2 * (k + 2) * (k + 3)) / (self.nobs - k - 3) 

959 else: 

960 aicc_penalty = np.inf 

961 aicc = aic + aicc_penalty 

962 bic = self.nobs * np.log(sse / self.nobs) + k * np.log(self.nobs) 

963 resid = data - fitted[:-h - 1] 

964 if remove_bias: 

965 fitted += resid.mean() 

966 self.params = {'smoothing_level': alpha, 

967 'smoothing_slope': beta, 

968 'smoothing_seasonal': gamma, 

969 'damping_slope': phi if damped else np.nan, 

970 'initial_level': lvls[0], 

971 'initial_slope': b[0] / phi if phi > 0 else 0, 

972 'initial_seasons': s[:m], 

973 'use_boxcox': use_boxcox, 

974 'lamda': lamda, 

975 'remove_bias': remove_bias} 

976 

977 # Format parameters into a DataFrame 

978 codes = ['alpha', 'beta', 'gamma', 'l.0', 'b.0', 'phi'] 

979 codes += ['s.{0}'.format(i) for i in range(m)] 

980 idx = ['smoothing_level', 'smoothing_slope', 'smoothing_seasonal', 

981 'initial_level', 'initial_slope', 'damping_slope'] 

982 idx += ['initial_seasons.{0}'.format(i) for i in range(m)] 

983 

984 formatted = [alpha, beta, gamma, lvls[0], b[0], phi] 

985 formatted += s[:m].tolist() 

986 formatted = list(map(lambda v: np.nan if v is None else v, formatted)) 

987 formatted = np.array(formatted) 

988 if is_optimized is None: 

989 optimized = np.zeros(len(codes), dtype=np.bool) 

990 else: 

991 optimized = is_optimized.astype(np.bool) 

992 included = [True, trending, seasoning, True, trending, damped] 

993 included += [True] * m 

994 formatted = pd.DataFrame([[c, f, o] for c, f, o in zip(codes, formatted, optimized)], 

995 columns=['name', 'param', 'optimized'], 

996 index=idx) 

997 formatted = formatted.loc[included] 

998 

999 hwfit = HoltWintersResults(self, self.params, fittedfcast=fitted, 

1000 fittedvalues=fitted[:-h - 1], fcastvalues=fitted[-h - 1:], 

1001 sse=sse, level=level, slope=slope, season=season, aic=aic, 

1002 bic=bic, aicc=aicc, resid=resid, k=k, 

1003 params_formatted=formatted, optimized=optimized) 

1004 return HoltWintersResultsWrapper(hwfit) 

1005 

1006 

class SimpleExpSmoothing(ExponentialSmoothing):
    """
    Simple Exponential Smoothing

    Parameters
    ----------
    endog : array_like
        Time series

    Returns
    -------
    results : SimpleExpSmoothing class

    Notes
    -----
    A full implementation of simple exponential smoothing following [1]_.
    `SimpleExpSmoothing` is :class:`ExponentialSmoothing` restricted to a
    model with neither trend nor seasonal components.

    See Also
    --------
    ExponentialSmoothing
    Holt

    References
    ----------
    .. [1] Hyndman, Rob J., and George Athanasopoulos. Forecasting: principles
        and practice. OTexts, 2014.
    """

    def __init__(self, endog):
        # The base-class defaults (no trend, no seasonality) are exactly the
        # SES specification, so no extra configuration is needed here.
        super(SimpleExpSmoothing, self).__init__(endog)

    def fit(self, smoothing_level=None, optimized=True, start_params=None,
            initial_level=None, use_brute=True):
        """
        Fit the model

        Parameters
        ----------
        smoothing_level : float, optional
            The smoothing_level value of the simple exponential smoothing;
            when provided it is used directly instead of being estimated.
        optimized : bool, optional
            Estimate model parameters by maximizing the log-likelihood
        start_params : ndarray, optional
            Starting values used when optimizing the fit. When omitted,
            starting values come from a mix of grid search and reasonable
            values derived from the initial data.
        initial_level : float, optional
            Value to use when initializing the fitted level.
        use_brute : bool, optional
            Search for good starting values using a brute force (grid)
            optimizer. If False, a naive set of starting values is used.

        Returns
        -------
        results : HoltWintersResults class
            See statsmodels.tsa.holtwinters.HoltWintersResults

        Notes
        -----
        A full implementation of simple exponential smoothing following [1].

        References
        ----------
        [1] Hyndman, Rob J., and George Athanasopoulos. Forecasting: principles
            and practice. OTexts, 2014.
        """
        # Delegate the actual estimation to the general Holt-Winters fit.
        ses_kwargs = dict(smoothing_level=smoothing_level,
                          optimized=optimized,
                          start_params=start_params,
                          initial_level=initial_level,
                          use_brute=use_brute)
        return super(SimpleExpSmoothing, self).fit(**ses_kwargs)

1081 

1082 

class Holt(ExponentialSmoothing):
    """
    Holt's Exponential Smoothing

    Parameters
    ----------
    endog : array_like
        Time series
    exponential : bool, optional
        Type of trend component.
    damped : bool, optional
        Should the trend component be damped.

    Returns
    -------
    results : Holt class

    Notes
    -----
    A full implementation of Holt's exponential smoothing following [1]_.
    `Holt` is :class:`ExponentialSmoothing` restricted to a trend-only
    model with no seasonal component.

    See Also
    --------
    ExponentialSmoothing
    SimpleExpSmoothing

    References
    ----------
    .. [1] Hyndman, Rob J., and George Athanasopoulos. Forecasting: principles
        and practice. OTexts, 2014.
    """

    def __init__(self, endog, exponential=False, damped=False):
        # Holt's method is a pure-trend model: multiplicative trend when
        # ``exponential`` is requested, additive otherwise.
        if exponential:
            trend = 'mul'
        else:
            trend = 'add'
        super(Holt, self).__init__(endog, trend=trend, damped=damped)

    def fit(self, smoothing_level=None, smoothing_slope=None, damping_slope=None,
            optimized=True, start_params=None, initial_level=None,
            initial_slope=None, use_brute=True):
        """
        Fit the model

        Parameters
        ----------
        smoothing_level : float, optional
            The alpha value of the simple exponential smoothing; when
            provided it is used directly instead of being estimated.
        smoothing_slope : float, optional
            The beta value of Holt's trend method; when provided it is used
            directly instead of being estimated.
        damping_slope : float, optional
            The phi value of the damped method; when provided it is used
            directly instead of being estimated.
        optimized : bool, optional
            Estimate model parameters by maximizing the log-likelihood
        start_params : ndarray, optional
            Starting values used when optimizing the fit. When omitted,
            starting values come from a mix of grid search and reasonable
            values derived from the initial data.
        initial_level : float, optional
            Value to use when initializing the fitted level.
        initial_slope : float, optional
            Value to use when initializing the fitted slope.
        use_brute : bool, optional
            Search for good starting values using a brute force (grid)
            optimizer. If False, a naive set of starting values is used.

        Returns
        -------
        results : HoltWintersResults class
            See statsmodels.tsa.holtwinters.HoltWintersResults

        Notes
        -----
        A full implementation of Holt's exponential smoothing following [1].

        References
        ----------
        [1] Hyndman, Rob J., and George Athanasopoulos. Forecasting: principles
            and practice. OTexts, 2014.
        """
        # Delegate the actual estimation to the general Holt-Winters fit.
        holt_kwargs = dict(smoothing_level=smoothing_level,
                           smoothing_slope=smoothing_slope,
                           damping_slope=damping_slope,
                           optimized=optimized,
                           start_params=start_params,
                           initial_level=initial_level,
                           initial_slope=initial_slope,
                           use_brute=use_brute)
        return super(Holt, self).fit(**holt_kwargs)