
#Splitting out marginal effects to see if they can be generalized

from statsmodels.compat.python import lzip
import numpy as np
from scipy.stats import norm
from statsmodels.tools.decorators import cache_readonly

#### margeff helper functions ####
#NOTE: todo marginal effects for group 2
# group 2 oprobit, ologit, gologit, mlogit, biprobit

def _check_margeff_args(at, method):
    """
    Checks valid options for margeff
    """
    if at not in ['overall', 'mean', 'median', 'zero', 'all']:
        raise ValueError("%s not a valid option for `at`." % at)
    if method not in ['dydx', 'eyex', 'dyex', 'eydx']:
        raise ValueError("method is not understood. Got %s" % method)

def _check_discrete_args(at, method):
    """
    Checks the arguments for margeff if the exogenous variables are discrete.
    """
    if method in ['dyex', 'eyex']:
        raise ValueError("%s not allowed for discrete variables" % method)
    if at in ['median', 'zero']:
        raise ValueError("%s not allowed for discrete variables" % at)

def _get_const_index(exog):
    """
    Returns a boolean array of non-constant column indices in exog and
    a scalar array of where the constant is or None
    """
    effects_idx = exog.var(0) != 0
    if np.any(~effects_idx):
        const_idx = np.where(~effects_idx)[0]
    else:
        const_idx = None
    return effects_idx, const_idx
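
# Illustrative sketch (values assumed, not from the original source): the
# constant column is the one with zero variance.
# >>> exog = np.column_stack((np.ones(5), np.arange(5, dtype=float)))
# >>> _get_const_index(exog)
# (array([False,  True]), array([0]))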

def _isdummy(X):
    """
    Given an array X, returns the column indices for the dummy variables.

    Parameters
    ----------
    X : array_like
        A 1d or 2d array of numbers

    Examples
    --------
    >>> X = np.random.randint(0, 2, size=(15,5)).astype(float)
    >>> X[:,1:3] = np.random.randn(15,2)
    >>> ind = _isdummy(X)
    >>> ind
    array([0, 3, 4])
    """
    X = np.asarray(X)
    if X.ndim > 1:
        ind = np.zeros(X.shape[1]).astype(bool)
    max = (np.max(X, axis=0) == 1)
    min = (np.min(X, axis=0) == 0)
    remainder = np.all(X % 1. == 0, axis=0)
    ind = min & max & remainder
    if X.ndim == 1:
        ind = np.asarray([ind])
    return np.where(ind)[0]
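
# Deterministic sketch (assumed values): a 0/1 integer column is flagged as a
# dummy, a continuous column is not.
# >>> X = np.array([[0., 1.5], [1., 2.5], [1., 3.5]])
# >>> _isdummy(X)
# array([0])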

def _get_dummy_index(X, const_idx):
    dummy_ind = _isdummy(X)
    dummy = True

    if dummy_ind.size == 0:  # do not waste your time
        dummy = False
        dummy_ind = None  # this gets passed to stand err func
    return dummy_ind, dummy

def _iscount(X):
    """
    Given an array X, returns the column indices for count variables.

    Parameters
    ----------
    X : array_like
        A 1d or 2d array of numbers

    Examples
    --------
    >>> X = np.random.randint(0, 10, size=(15,5)).astype(float)
    >>> X[:,1:3] = np.random.randn(15,2)
    >>> ind = _iscount(X)
    >>> ind
    array([0, 3, 4])
    """
    X = np.asarray(X)
    remainder = np.logical_and(np.logical_and(np.all(X % 1. == 0, axis=0),
                                              X.var(0) != 0),
                               np.all(X >= 0, axis=0))
    dummy = _isdummy(X)
    remainder = np.where(remainder)[0].tolist()
    for idx in dummy:
        remainder.remove(idx)
    return np.array(remainder)
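
# Deterministic sketch (assumed values): a non-negative integer column with
# more than two distinct values is a count; dummy and continuous columns are
# excluded.
# >>> X = np.array([[0., 0., 1.2], [3., 1., 2.2], [5., 1., 3.2]])
# >>> _iscount(X)
# array([0])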

def _get_count_index(X, const_idx):
    count_ind = _iscount(X)
    count = True

    if count_ind.size == 0:  # do not waste your time
        count = False
        count_ind = None  # for stand err func
    return count_ind, count

def _get_margeff_exog(exog, at, atexog, ind):
    if atexog is not None:  # user supplied
        if isinstance(atexog, dict):
            # assumes values are singular or of len(exog)
            for key in atexog:
                exog[:, key] = atexog[key]
        elif isinstance(atexog, np.ndarray):  # TODO: handle DataFrames
            if atexog.ndim == 1:
                k_vars = len(atexog)
            else:
                k_vars = atexog.shape[1]
            if k_vars != exog.shape[1]:
                raise ValueError("atexog does not have the same number "
                                 "of variables as exog")
            exog = atexog

    #NOTE: we should fill in atexog after we process at
    if at == 'mean':
        exog = np.atleast_2d(exog.mean(0))
    elif at == 'median':
        exog = np.atleast_2d(np.median(exog, axis=0))
    elif at == 'zero':
        exog = np.zeros((1, exog.shape[1]))
        exog[0, ~ind] = 1
    return exog
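
# Sketch of the `at` reduction (assumed values): 'mean' collapses exog to a
# single row of column means before the effects are evaluated.
# >>> exog = np.array([[1., 2.], [1., 4.]])
# >>> _get_margeff_exog(exog, 'mean', None, np.array([False, True]))
# array([[1., 3.]])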

def _get_count_effects(effects, exog, count_ind, method, model, params):
    """
    If there's a count variable, the predicted difference is taken by
    subtracting one and adding one to exog then averaging the difference.
    """
    # this is the index for the effect and the index for count col in exog
    for i in count_ind:
        exog0 = exog.copy()
        exog0[:, i] -= 1
        effect0 = model.predict(params, exog0)
        exog0[:, i] += 2
        effect1 = model.predict(params, exog0)
        #NOTE: done by analogy with dummy effects but untested bc
        # stata does not handle both count and eydx anywhere
        if 'ey' in method:
            effect0 = np.log(effect0)
            effect1 = np.log(effect1)
        effects[:, i] = ((effect1 - effect0) / 2)
    return effects
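
# Minimal sketch (hypothetical toy model, only for illustration): the centered
# difference (predict at x_i + 1 minus predict at x_i - 1) / 2 reduces to the
# coefficient when predict is linear.
# >>> class _ToyModel:
# ...     def predict(self, params, exog):
# ...         return exog @ params
# >>> effects = np.zeros((1, 2))
# >>> _get_count_effects(effects, np.array([[1., 3.]]), [1], 'dydx',
# ...                    _ToyModel(), np.array([2., 1.]))
# array([[0., 1.]])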

def _get_dummy_effects(effects, exog, dummy_ind, method, model, params):
    """
    If there's a dummy variable, the predicted difference is taken at
    0 and 1.
    """
    # this is the index for the effect and the index for dummy col in exog
    for i in dummy_ind:
        exog0 = exog.copy()  # only copy once, can we avoid a copy?
        exog0[:, i] = 0
        effect0 = model.predict(params, exog0)
        #fittedvalues0 = np.dot(exog0, params)
        exog0[:, i] = 1
        effect1 = model.predict(params, exog0)
        if 'ey' in method:
            effect0 = np.log(effect0)
            effect1 = np.log(effect1)
        effects[:, i] = (effect1 - effect0)
    return effects
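
# Analogous sketch with the same hypothetical toy model: for a dummy column the
# effect is the full difference between predictions at 1 and at 0.
# >>> effects = np.zeros((1, 2))
# >>> _get_dummy_effects(effects, np.array([[1., 1.]]), [1], 'dydx',
# ...                    _ToyModel(), np.array([2., 1.]))
# array([[0., 1.]])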

def _effects_at(effects, at):
    if at == 'all':
        effects = effects
    elif at == 'overall':
        effects = effects.mean(0)
    else:
        effects = effects[0, :]
    return effects
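
# Sketch (assumed values): 'overall' averages the per-observation effects,
# while 'mean', 'median' and 'zero' keep the single evaluated row.
# >>> eff = np.array([[1., 2.], [3., 4.]])
# >>> _effects_at(eff, 'overall')
# array([2., 3.])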

def _margeff_cov_params_dummy(model, cov_margins, params, exog, dummy_ind,
                              method, J):
    r"""
    Returns the Jacobian for discrete regressors for use in margeff_cov_params.

    For discrete regressors the marginal effect is

    \Delta F = F(XB) | d = 1 - F(XB) | d = 0

    The row of the Jacobian for this variable is given by

    f(XB)*X | d = 1 - f(XB)*X | d = 0

    Where F is the default prediction of the model.
    """
    for i in dummy_ind:
        exog0 = exog.copy()
        exog1 = exog.copy()
        exog0[:, i] = 0
        exog1[:, i] = 1
        dfdb0 = model._derivative_predict(params, exog0, method)
        dfdb1 = model._derivative_predict(params, exog1, method)
        dfdb = (dfdb1 - dfdb0)
        if dfdb.ndim >= 2:  # for overall
            dfdb = dfdb.mean(0)
        if J > 1:
            K = dfdb.shape[1] // (J - 1)
            cov_margins[i::K, :] = dfdb
        else:
            # dfdb could be too short if there are extra params, k_extra > 0
            cov_margins[i, :len(dfdb)] = dfdb  # how each F changes with change in B
    return cov_margins

def _margeff_cov_params_count(model, cov_margins, params, exog, count_ind,
                              method, J):
    r"""
    Returns the Jacobian for count regressors for use in margeff_cov_params.

    For count regressors the marginal effect is

    \Delta F = (F(XB) | d += 1 - F(XB) | d -= 1) / 2

    The row of the Jacobian for this variable is given by

    (f(XB)*X | d += 1 - f(XB)*X | d -= 1) / 2

    where F is the default prediction for the model.
    """
    for i in count_ind:
        exog0 = exog.copy()
        exog0[:, i] -= 1
        dfdb0 = model._derivative_predict(params, exog0, method)
        exog0[:, i] += 2
        dfdb1 = model._derivative_predict(params, exog0, method)
        dfdb = (dfdb1 - dfdb0)
        if dfdb.ndim >= 2:  # for overall
            dfdb = dfdb.mean(0) / 2
        if J > 1:
            K = dfdb.shape[1] // (J - 1)
            cov_margins[i::K, :] = dfdb
        else:
            # dfdb could be too short if there are extra params, k_extra > 0
            cov_margins[i, :len(dfdb)] = dfdb  # how each F changes with change in B
    return cov_margins

def margeff_cov_params(model, params, exog, cov_params, at, derivative,
                       dummy_ind, count_ind, method, J):
    """
    Computes the variance-covariance of marginal effects by the delta method.

    Parameters
    ----------
    model : model instance
        The model that returned the fitted results. Its pdf method is used
        for computing the Jacobian of discrete variables in dummy_ind and
        count_ind.
    params : array_like
        estimated model parameters
    exog : array_like
        exogenous variables at which to calculate the derivative
    cov_params : array_like
        The variance-covariance of the parameters
    at : str
        Options are:

        - 'overall', The average of the marginal effects at each
          observation.
        - 'mean', The marginal effects at the mean of each regressor.
        - 'median', The marginal effects at the median of each regressor.
        - 'zero', The marginal effects at zero for each regressor.
        - 'all', The marginal effects at each observation.

        Only 'overall' has any effect here.
    derivative : function or array_like
        If a function, it returns the marginal effects of the model with
        respect to the exogenous variables evaluated at exog. Expected to be
        called derivative(params, exog). This will be numerically
        differentiated. Otherwise, it can be the Jacobian of the marginal
        effects with respect to the parameters.
    dummy_ind : array_like
        Indices of the columns of exog that contain dummy variables
    count_ind : array_like
        Indices of the columns of exog that contain count variables

    Notes
    -----
    For continuous regressors, the variance-covariance is given by

    Asy. Var[MargEff] = [d margeff / d params] V [d margeff / d params]'

    where V is the parameter variance-covariance.

    The outer Jacobians are computed via numerical differentiation if
    derivative is a function.
    """
    if callable(derivative):
        from statsmodels.tools.numdiff import approx_fprime_cs
        params = params.ravel('F')  # for Multinomial
        try:
            jacobian_mat = approx_fprime_cs(params, derivative,
                                            args=(exog, method))
        except TypeError:  # norm.cdf does not take complex values
            from statsmodels.tools.numdiff import approx_fprime
            jacobian_mat = approx_fprime(params, derivative,
                                         args=(exog, method))
        if at == 'overall':
            jacobian_mat = np.mean(jacobian_mat, axis=1)
        else:
            jacobian_mat = jacobian_mat.squeeze()  # exog was 2d row vector
        if dummy_ind is not None:
            jacobian_mat = _margeff_cov_params_dummy(model, jacobian_mat,
                                                     params, exog, dummy_ind,
                                                     method, J)
        if count_ind is not None:
            jacobian_mat = _margeff_cov_params_count(model, jacobian_mat,
                                                     params, exog, count_ind,
                                                     method, J)
    else:
        jacobian_mat = derivative

    #NOTE: this will not go through for at == 'all'
    return np.dot(np.dot(jacobian_mat, cov_params), jacobian_mat.T)
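
# Delta-method sandwich in isolation (illustrative shapes only, values assumed):
# >>> jac = np.array([[1., 0.5]])   # d margeff / d params, (n_effects x k)
# >>> V = np.eye(2) * 0.04          # parameter variance-covariance, (k x k)
# >>> jac @ V @ jac.T               # Asy. Var[MargEff]
# array([[0.05]])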

def margeff_cov_with_se(model, params, exog, cov_params, at, derivative,
                        dummy_ind, count_ind, method, J):
    """
    See margeff_cov_params.

    Same function but returns both the covariance of the marginal effects
    and their standard errors.
    """
    cov_me = margeff_cov_params(model, params, exog, cov_params, at,
                                derivative, dummy_ind,
                                count_ind, method, J)
    return cov_me, np.sqrt(np.diag(cov_me))

def margeff():
    raise NotImplementedError


def _check_at_is_all(method):
    if method['at'] == 'all':
        raise ValueError("Only margeff are available when `at` is "
                         "'all'. Please input specific points if you would "
                         "like to do inference.")


_transform_names = dict(dydx='dy/dx',
                        eyex='d(lny)/d(lnx)',
                        dyex='dy/d(lnx)',
                        eydx='d(lny)/dx')

class Margins(object):
    """
    Mostly a do nothing class. Lays out the methods expected of a sub-class.

    This is just a sketch of what we may want out of a general margins class.
    I (SS) need to look at details of other models.
    """
    def __init__(self, results, get_margeff, derivative, dist=None,
                 margeff_args=()):
        self._cache = {}
        self.results = results
        self.dist = dist
        self.get_margeff(margeff_args)

    def _reset(self):
        self._cache = {}

    def get_margeff(self, *args, **kwargs):
        self._reset()
        self.margeff = self.get_margeff(*args)

    @cache_readonly
    def tvalues(self):
        raise NotImplementedError

    @cache_readonly
    def cov_margins(self):
        raise NotImplementedError

    @cache_readonly
    def margins_se(self):
        raise NotImplementedError

    def summary_frame(self):
        raise NotImplementedError

    @cache_readonly
    def pvalues(self):
        raise NotImplementedError

    def conf_int(self, alpha=.05):
        raise NotImplementedError

    def summary(self, alpha=.05):
        raise NotImplementedError

#class DiscreteMargins(Margins):
class DiscreteMargins(object):
    """Get marginal effects of a Discrete Choice model.

    Parameters
    ----------
    results : DiscreteResults instance
        The results instance of a fitted discrete choice model
    args : tuple
        Args are passed to `get_margeff`. This is the same as
        results.get_margeff. See there for more information.
    kwargs : dict
        Keyword args are passed to `get_margeff`. This is the same as
        results.get_margeff. See there for more information.
    """
    def __init__(self, results, args, kwargs={}):
        self._cache = {}
        self.results = results
        self.get_margeff(*args, **kwargs)

    def _reset(self):
        self._cache = {}

    @cache_readonly
    def tvalues(self):
        _check_at_is_all(self.margeff_options)
        return self.margeff / self.margeff_se

    def summary_frame(self, alpha=.05):
        """
        Returns a DataFrame summarizing the marginal effects.

        Parameters
        ----------
        alpha : float
            Number between 0 and 1. The confidence intervals have the
            probability 1-alpha.

        Returns
        -------
        frame : DataFrame
            A DataFrame summarizing the marginal effects.

        Notes
        -----
        The DataFrame is created on each call and not cached, as are the
        tables built by `summary()`.
        """
        _check_at_is_all(self.margeff_options)
        results = self.results
        model = self.results.model
        from pandas import DataFrame, MultiIndex
        names = [_transform_names[self.margeff_options['method']],
                 'Std. Err.', 'z', 'Pr(>|z|)',
                 'Conf. Int. Low', 'Conf. Int. Hi.']
        ind = self.results.model.exog.var(0) != 0  # True if not a constant
        exog_names = self.results.model.exog_names
        k_extra = getattr(model, 'k_extra', 0)
        if k_extra > 0:
            exog_names = exog_names[:-k_extra]
        var_names = [name for i, name in enumerate(exog_names) if ind[i]]

        if self.margeff.ndim == 2:
            # MNLogit case
            ci = self.conf_int(alpha)
            table = np.column_stack([i.ravel("F") for i in
                                     [self.margeff, self.margeff_se,
                                      self.tvalues, self.pvalues,
                                      ci[:, 0, :], ci[:, 1, :]]])

            _, yname_list = results._get_endog_name(model.endog_names,
                                                    None, all=True)
            ynames = np.repeat(yname_list, len(var_names))
            xnames = np.tile(var_names, len(yname_list))
            index = MultiIndex.from_tuples(list(zip(ynames, xnames)),
                                           names=['endog', 'exog'])
        else:
            table = np.column_stack((self.margeff, self.margeff_se,
                                     self.tvalues, self.pvalues,
                                     self.conf_int(alpha)))
            index = var_names

        return DataFrame(table, columns=names, index=index)

    @cache_readonly
    def pvalues(self):
        _check_at_is_all(self.margeff_options)
        return norm.sf(np.abs(self.tvalues)) * 2
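
    # Two-sided p-value from |z| (illustrative value, assumed):
    # >>> round(float(norm.sf(1.96) * 2), 3)
    # 0.05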

    def conf_int(self, alpha=.05):
        """
        Returns the confidence intervals of the marginal effects.

        Parameters
        ----------
        alpha : float
            Number between 0 and 1. The confidence intervals have the
            probability 1-alpha.

        Returns
        -------
        conf_int : ndarray
            An array with lower, upper confidence intervals for the marginal
            effects.
        """
        _check_at_is_all(self.margeff_options)
        me_se = self.margeff_se
        q = norm.ppf(1 - alpha / 2)
        lower = self.margeff - q * me_se
        upper = self.margeff + q * me_se
        return np.asarray(lzip(lower, upper))
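
    # The interval uses the two-sided normal critical value (assumed
    # illustration for the default alpha=.05):
    # >>> float(norm.ppf(1 - 0.05 / 2))
    # 1.959963984540054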

    def summary(self, alpha=.05):
        """
        Returns a summary table for marginal effects.

        Parameters
        ----------
        alpha : float
            Number between 0 and 1. The confidence intervals have the
            probability 1-alpha.

        Returns
        -------
        Summary : SummaryTable
            A SummaryTable instance
        """
        _check_at_is_all(self.margeff_options)
        results = self.results
        model = results.model
        title = model.__class__.__name__ + " Marginal Effects"
        method = self.margeff_options['method']
        top_left = [('Dep. Variable:', [model.endog_names]),
                    ('Method:', [method]),
                    ('At:', [self.margeff_options['at']]),]

        from statsmodels.iolib.summary import (Summary, summary_params,
                                               table_extend)
        exog_names = model.exog_names[:]  # copy
        smry = Summary()

        # TODO: sigh, we really need to hold on to this in _data...
        _, const_idx = _get_const_index(model.exog)
        if const_idx is not None:
            exog_names.pop(const_idx[0])
        if getattr(model, 'k_extra', 0) > 0:
            exog_names = exog_names[:-model.k_extra]

        J = int(getattr(model, "J", 1))
        if J > 1:
            yname, yname_list = results._get_endog_name(model.endog_names,
                                                        None, all=True)
        else:
            yname = model.endog_names
            yname_list = [yname]

        smry.add_table_2cols(self, gleft=top_left, gright=[],
                             yname=yname, xname=exog_names, title=title)

        # NOTE: add_table_params is not general enough yet for margeff
        # could use a refactor with getattr instead of hard-coded params
        # tvalues etc.
        table = []
        conf_int = self.conf_int(alpha)
        margeff = self.margeff
        margeff_se = self.margeff_se
        tvalues = self.tvalues
        pvalues = self.pvalues
        if J > 1:
            for eq in range(J):
                restup = (results, margeff[:, eq], margeff_se[:, eq],
                          tvalues[:, eq], pvalues[:, eq], conf_int[:, :, eq])
                tble = summary_params(restup, yname=yname_list[eq],
                                      xname=exog_names, alpha=alpha,
                                      use_t=False, skip_header=True)
                tble.title = yname_list[eq]
                # overwrite coef with method name
                header = ['', _transform_names[method], 'std err', 'z',
                          'P>|z|', '[' + str(alpha/2), str(1-alpha/2) + ']']
                tble.insert_header_row(0, header)
                table.append(tble)

            table = table_extend(table, keep_headers=True)
        else:
            restup = (results, margeff, margeff_se, tvalues, pvalues, conf_int)
            table = summary_params(restup, yname=yname, xname=exog_names,
                                   alpha=alpha, use_t=False, skip_header=True)
            header = ['', _transform_names[method], 'std err', 'z',
                      'P>|z|', '[' + str(alpha/2), str(1-alpha/2) + ']']
            table.insert_header_row(0, header)

        smry.tables.append(table)
        return smry

    def get_margeff(self, at='overall', method='dydx', atexog=None,
                    dummy=False, count=False):
        """Get marginal effects of the fitted model.

        Parameters
        ----------
        at : str, optional
            Options are:

            - 'overall', The average of the marginal effects at each
              observation.
            - 'mean', The marginal effects at the mean of each regressor.
            - 'median', The marginal effects at the median of each regressor.
            - 'zero', The marginal effects at zero for each regressor.
            - 'all', The marginal effects at each observation. If `at` is
              'all' only margeff will be available.

            Note that if `exog` is specified, then marginal effects for all
            variables not specified by `exog` are calculated using the `at`
            option.
        method : str, optional
            Options are:

            - 'dydx' - dy/dx - No transformation is made and marginal effects
              are returned. This is the default.
            - 'eyex' - estimate elasticities of variables in `exog` --
              d(lny)/d(lnx)
            - 'dyex' - estimate semi-elasticity -- dy/d(lnx)
            - 'eydx' - estimate semi-elasticity -- d(lny)/dx

            Note that transformations are done after each observation is
            calculated. Semi-elasticities for binary variables are computed
            using the midpoint method. 'dyex' and 'eyex' do not make sense
            for discrete variables.
        atexog : array_like, optional
            Optionally, you can provide the exogenous variables over which to
            get the marginal effects. This should be a dictionary with the
            zero-indexed column number as the key and the value(s) at which to
            hold that variable as the dictionary value. Default is None for
            all independent variables less the constant.
        dummy : bool, optional
            If False, treats binary variables (if present) as continuous. This
            is the default. Else if True, treats binary variables as
            changing from 0 to 1. Note that any variable that is either 0 or 1
            is treated as binary. Each binary variable is treated separately
            for now.
        count : bool, optional
            If False, treats count variables (if present) as continuous. This
            is the default. Else if True, the marginal effect is the
            change in probabilities when each observation is increased by one.

        Returns
        -------
        effects : ndarray
            the marginal effect corresponding to the input options

        Notes
        -----
        When using after Poisson, returns the expected number of events
        per period, assuming that the model is loglinear.
        """
        self._reset()  # always reset the cache when this is called
        #TODO: if at is not all or overall, we can also put atexog values
        # in summary table head
        method = method.lower()
        at = at.lower()
        _check_margeff_args(at, method)
        self.margeff_options = dict(method=method, at=at)
        results = self.results
        model = results.model
        params = results.params
        exog = model.exog.copy()  # copy because values are changed
        effects_idx, const_idx = _get_const_index(exog)
        if hasattr(model, 'k_extra') and model.k_extra > 0:
            effects_idx = np.concatenate((effects_idx,
                                          np.zeros(model.k_extra, np.bool_)))

        if dummy:
            _check_discrete_args(at, method)
            dummy_idx, dummy = _get_dummy_index(exog, const_idx)
        else:
            dummy_idx = None

        if count:
            _check_discrete_args(at, method)
            count_idx, count = _get_count_index(exog, const_idx)
        else:
            count_idx = None

        # attach dummy_idx and count_idx
        self.dummy_idx = dummy_idx
        self.count_idx = count_idx

        # get the exogenous variables
        exog = _get_margeff_exog(exog, at, atexog, effects_idx)

        # get base marginal effects, handled by sub-classes
        effects = model._derivative_exog(params, exog, method,
                                         dummy_idx, count_idx)

        J = getattr(model, 'J', 1)
        effects_idx = np.tile(effects_idx, J)  # adjust for multi-equation.

        effects = _effects_at(effects, at)

        if at == 'all':
            if J > 1:
                K = model.K - np.any(~effects_idx)  # subtract constant
                self.margeff = effects[:, effects_idx].reshape(-1, K, J,
                                                               order='F')
            else:
                self.margeff = effects[:, effects_idx]
        else:
            # Set standard error of the marginal effects by Delta method.
            margeff_cov, margeff_se = margeff_cov_with_se(
                model, params, exog, results.cov_params(), at,
                model._derivative_exog, dummy_idx, count_idx, method, J)

            # reshape for multi-equation
            if J > 1:
                K = model.K - np.any(~effects_idx)  # subtract constant
                self.margeff = effects[effects_idx].reshape(K, J, order='F')
                self.margeff_se = margeff_se[effects_idx].reshape(K, J,
                                                                  order='F')
                self.margeff_cov = margeff_cov[effects_idx][:, effects_idx]
            else:
                # do not care about at constant
                # hack truncate effects_idx again if necessary
                # if eyex, then effects is truncated to be without extra params
                effects_idx = effects_idx[:len(effects)]
                self.margeff_cov = margeff_cov[effects_idx][:, effects_idx]
                self.margeff_se = margeff_se[effects_idx]
                self.margeff = effects[effects_idx]
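

# Usage sketch (assumed workflow; in practice one would normally call
# results.get_margeff() on a fitted statsmodels discrete-choice results
# object rather than construct DiscreteMargins directly):
# >>> import statsmodels.api as sm
# >>> data = sm.datasets.spector.load_pandas()
# >>> exog = sm.add_constant(data.exog)
# >>> res = sm.Logit(data.endog, exog).fit(disp=0)
# >>> margins = DiscreteMargins(res, (), dict(at='overall', method='dydx'))
# >>> margins.summary_frame()  # dy/dx, std err, z, p-values, conf. intervals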