Coverage for /home/martinb/.local/share/virtualenvs/camcops/lib/python3.6/site-packages/statsmodels/emplike/elregress.py : 17%

1"""
2Empirical Likelihood Linear Regression Inference
4The script contains the function that is optimized over nuisance parameters to
5 conduct inference on linear regression parameters. It is called by eltest
6in OLSResults.
9General References
10-----------------
12Owen, A.B.(2001). Empirical Likelihood. Chapman and Hall
14"""
import numpy as np
from statsmodels.emplike.descriptive import _OptFuncts


class _ELRegOpts(_OptFuncts):
    """

    A class that holds functions to be optimized over when conducting
    hypothesis tests and calculating confidence intervals.

    Parameters
    ----------

    OLSResults : Results instance
        A fitted OLS result.
    """
    def __init__(self):
        pass

    def _opt_nuis_regress(self, nuisance_params, param_nums=None,
                          endog=None, exog=None,
                          nobs=None, nvar=None, params=None, b0_vals=None,
                          stochastic_exog=None):
        """
        A function that is optimized over nuisance parameters to conduct a
        hypothesis test for the parameters of interest.

        Parameters
        ----------
        nuisance_params : 1darray
            Parameters to be optimized over.

        Returns
        -------
        llr : float
            -2 x the log-likelihood of the nuisance parameters and the
            hypothesized value of the parameter(s) of interest.
        """
        params[param_nums] = b0_vals
        nuis_param_index = np.int_(np.delete(np.arange(nvar),
                                             param_nums))
        params[nuis_param_index] = nuisance_params
        new_params = params.reshape(nvar, 1)
        self.new_params = new_params
        est_vect = exog * \
            (endog - np.squeeze(np.dot(exog, new_params))).reshape(int(nobs), 1)
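        # When the regressors are treated as fixed (non-stochastic), append
        # extra moment restrictions tying the sample means and second moments
        # of the non-constant regressors to their observed values.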
        if not stochastic_exog:
            exog_means = np.mean(exog, axis=0)[1:]
            exog_mom2 = np.sum(exog * exog, axis=0)[1:] / nobs
            mean_est_vect = exog[:, 1:] - exog_means
            mom2_est_vect = (exog * exog)[:, 1:] - exog_mom2
            regressor_est_vect = np.concatenate((mean_est_vect, mom2_est_vect),
                                                axis=1)
            est_vect = np.concatenate((est_vect, regressor_est_vect),
                                      axis=1)
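
        # Solve for the Lagrange multiplier eta with the modified Newton
        # routine, starting from uniform weights and a zero multiplier; the
        # implied empirical likelihood weights yield the -2 log-likelihood
        # ratio, or +inf when the Newton step hits a singular linear system.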
        wts = np.ones(int(nobs)) * (1. / nobs)
        x0 = np.zeros(est_vect.shape[1]).reshape(-1, 1)
        try:
            eta_star = self._modif_newton(x0, est_vect, wts)
            denom = 1. + np.dot(eta_star, est_vect.T)
            self.new_weights = 1. / nobs * 1. / denom
            # the following commented-out code is to verify weights
            # see open issue #1845
            # self.new_weights /= self.new_weights.sum()
            # if not np.allclose(self.new_weights.sum(), 1., rtol=0, atol=1e-10):
            #     raise RuntimeError('weights do not sum to 1')
            llr = np.sum(np.log(nobs * self.new_weights))
            return -2 * llr
        except np.linalg.LinAlgError:
            return np.inf
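

if __name__ == "__main__":
    # Minimal usage sketch, not part of the original module: _opt_nuis_regress
    # is normally reached indirectly through OLSResults.el_test, which profiles
    # out the nuisance parameters.  The data and hypothesized value below are
    # illustrative only, and the call assumes the standard statsmodels API
    # (el_test returning the -2 log empirical likelihood ratio and its
    # chi-square p-value).
    import statsmodels.api as sm

    rng = np.random.RandomState(0)
    x = sm.add_constant(rng.standard_normal((100, 2)))
    y = x.dot(np.array([1.0, 2.0, -1.0])) + rng.standard_normal(100)

    fit = sm.OLS(y, x).fit()
    # Test H0: the coefficient on the first non-constant regressor equals 2.
    llr, pval = fit.el_test(np.array([2.0]), np.array([1]))
    print("llr:", llr, "p-value:", pval)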