Python statsmodels.api.add_constant() Examples
The following are 30 code examples of statsmodels.api.add_constant(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module statsmodels.api.
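Before the examples, a minimal self-contained sketch of what add_constant() does may help: it prepends (the default) or appends a column of ones to an exogenous array or DataFrame so that a fitted model includes an intercept. The toy data and variable names here are illustrative only, not taken from any of the examples below.

import numpy as np
import statsmodels.api as sm

# Toy data: y = 2 + 3*x + noise
rng = np.random.default_rng(0)
x = rng.normal(size=100)
y = 2.0 + 3.0 * x + 0.1 * rng.normal(size=100)

exog = sm.add_constant(x)   # shape (100, 2): constant column first (prepend=True)
# sm.add_constant(x, prepend=False) would place the constant last,
# as most of the examples below do.
res = sm.OLS(y, exog).fit()
print(res.params)           # roughly [2.0, 3.0]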
Example #1
Source File: test_glm.py From vnpy_crypto with MIT License
def setup_class(cls):
    '''
    Test Gaussian family with canonical identity link
    '''
    # Test Precisions
    cls.decimal_resids = DECIMAL_3
    cls.decimal_params = DECIMAL_2
    cls.decimal_bic = DECIMAL_0
    cls.decimal_bse = DECIMAL_3

    from statsmodels.datasets.longley import load
    cls.data = load()
    cls.data.exog = add_constant(cls.data.exog, prepend=False)
    cls.res1 = GLM(cls.data.endog, cls.data.exog,
                   family=sm.families.Gaussian()).fit()

    from .results.results_glm import Longley
    cls.res2 = Longley()
Example #2
Source File: test_regressionplots.py From vnpy_crypto with MIT License
def setup(self):
    nsample = 100
    sig = 0.5
    x1 = np.linspace(0, 20, nsample)
    x2 = 5 + 3 * np.random.randn(nsample)
    X = np.c_[x1, x2, np.sin(0.5 * x1), (x2 - 5)**2, np.ones(nsample)]
    beta = [0.5, 0.5, 1, -0.04, 5.]
    y_true = np.dot(X, beta)
    y = y_true + sig * np.random.normal(size=nsample)

    exog0 = sm.add_constant(np.c_[x1, x2], prepend=False)
    exog0 = DataFrame(exog0, columns=["const", "var1", "var2"])
    y = Series(y, name="outcome")
    res = sm.OLS(y, exog0).fit()
    self.res = res

    data = DataFrame(exog0, columns=["const", "var1", "var2"])
    data['y'] = y
    self.data = data
Example #3
Source File: test_count_model.py From vnpy_crypto with MIT License
def setup_class(cls):
    data = sm.datasets.randhie.load()
    cls.endog = data.endog
    exog = sm.add_constant(data.exog[:, 1], prepend=False)
    exog_infl = sm.add_constant(data.exog[:, 0], prepend=False)
    # cheating for now, parameters are not well identified in this dataset
    # see https://github.com/statsmodels/statsmodels/pull/3928#issuecomment-331724022
    sp = np.array([1.88, -10.28, -0.20, 1.14, 1.34])
    cls.res1 = sm.ZeroInflatedNegativeBinomialP(
        data.endog, exog, exog_infl=exog_infl, p=2).fit(
            start_params=sp, method='nm', xtol=1e-6, maxiter=5000, disp=0)
    # for llnull test
    cls.res1._results._attach_nullmodel = True
    cls.init_keys = ['exog_infl', 'exposure', 'inflation', 'offset', 'p']
    cls.init_kwds = {'inflation': 'logit', 'p': 2}
    res2 = RandHIE()
    res2.zero_inflated_negative_binomial()
    cls.res2 = res2
Example #4
Source File: test_discrete.py From vnpy_crypto with MIT License
def test_margeff_dummy(self):
    data = self.data
    vote = data.data['vote']
    exog = np.column_stack((data.exog, vote))
    exog = sm.add_constant(exog, prepend=False)
    res = MNLogit(data.endog, exog).fit(method="newton", disp=0)

    me = res.get_margeff(dummy=True)
    assert_almost_equal(me.margeff,
                        self.res2.margeff_dydx_dummy_overall, 6)
    assert_almost_equal(me.margeff_se,
                        self.res2.margeff_dydx_dummy_overall_se, 6)

    me = res.get_margeff(dummy=True, method="eydx")
    assert_almost_equal(me.margeff,
                        self.res2.margeff_eydx_dummy_overall, 5)
    assert_almost_equal(me.margeff_se,
                        self.res2.margeff_eydx_dummy_overall_se, 6)
Example #5
Source File: test_discrete.py From vnpy_crypto with MIT License
def test_poisson_newton():
    # GH: 24, Newton doesn't work well sometimes
    nobs = 10000
    np.random.seed(987689)
    x = np.random.randn(nobs, 3)
    x = sm.add_constant(x, prepend=True)
    y_count = np.random.poisson(np.exp(x.sum(1)))
    mod = sm.Poisson(y_count, x)
    from pandas.util.testing import assert_produces_warning  # this is not thread-safe

    # py 2.7 and 3.3 don't raise here anymore #4235
    import sys
    PY3_g3 = sys.version_info[:2] > (3, 3)
    if PY3_g3:
        with assert_produces_warning():
            warnings.simplefilter('always')
            res = mod.fit(start_params=-np.ones(4), method='newton', disp=0)
    else:
        res = mod.fit(start_params=-np.ones(4), method='newton', disp=0)

    assert_(not res.mle_retvals['converged'])
Example #6
Source File: test_discrete.py From vnpy_crypto with MIT License
def test_issue_339():
    # make sure MNLogit summary works for J != K.
    data = sm.datasets.anes96.load()
    exog = data.exog
    # leave out last exog column
    exog = exog[:, :-1]
    exog = sm.add_constant(exog, prepend=True)
    res1 = sm.MNLogit(data.endog, exog).fit(method="newton", disp=0)

    # strip the header from the test
    smry = "\n".join(res1.summary().as_text().split('\n')[9:])
    cur_dir = os.path.dirname(os.path.abspath(__file__))
    test_case_file = os.path.join(cur_dir, 'results', 'mn_logit_summary.txt')
    with open(test_case_file, 'r') as fd:
        test_case = fd.read()
    np.testing.assert_equal(smry, test_case[:-1])

    # smoke test for summary2
    res1.summary2()  # see #3651
Example #7
Source File: test_glm.py From vnpy_crypto with MIT License
def setup_class(cls):
    '''
    Tests Gamma family with canonical inverse link (power -1)
    '''
    # Test Precisions
    cls.decimal_aic_R = -1  # TODO: off by about 1, we are right with Stata
    cls.decimal_resids = DECIMAL_2

    from statsmodels.datasets.scotland import load
    from .results.results_glm import Scotvote
    data = load()
    data.exog = add_constant(data.exog, prepend=False)
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        res1 = GLM(data.endog, data.exog,
                   family=sm.families.Gamma()).fit()
    cls.res1 = res1
    # res2 = RModel(data.endog, data.exog, r.glm, family=r.Gamma)
    res2 = Scotvote()
    res2.aic_R += 2  # R doesn't count degree of freedom for scale with gamma
    cls.res2 = res2
Example #8
Source File: test_discrete.py From vnpy_crypto with MIT License
def setup_class(cls):
    cls.kvars = 10  # Number of variables
    cls.m = 7  # Number of unregularized parameters
    rand_data = sm.datasets.randhie.load()
    rand_exog = rand_data.exog.view(float).reshape(len(rand_data.exog), -1)
    rand_exog = sm.add_constant(rand_exog, prepend=True)

    # Drop some columns and do an unregularized fit
    exog_no_PSI = rand_exog[:, :cls.m]
    mod_unreg = sm.NegativeBinomial(rand_data.endog, exog_no_PSI,
                                    loglike_method='geometric')
    cls.res_unreg = mod_unreg.fit(method="newton", disp=False)

    # Do a regularized fit with alpha, effectively dropping the last columns
    alpha = 10 * len(rand_data.endog) * np.ones(cls.kvars)
    alpha[:cls.m] = 0
    mod_reg = sm.NegativeBinomial(rand_data.endog, rand_exog,
                                  loglike_method='geometric')
    cls.res_reg = mod_reg.fit_regularized(
        method='l1', alpha=alpha, disp=False, acc=1e-10,
        maxiter=2000, trim_mode='auto')

    assert_equal(mod_reg.loglike_method, 'geometric')
Example #9
Source File: test_discrete.py From vnpy_crypto with MIT License
def setup_class(cls):
    cls.kvars = 10  # Number of variables
    cls.m = 7  # Number of unregularized parameters
    rand_data = sm.datasets.randhie.load()
    rand_exog = rand_data.exog.view(float).reshape(len(rand_data.exog), -1)
    rand_exog = sm.add_constant(rand_exog, prepend=True)

    # Drop some columns and do an unregularized fit
    exog_no_PSI = rand_exog[:, :cls.m]
    mod_unreg = sm.Poisson(rand_data.endog, exog_no_PSI)
    cls.res_unreg = mod_unreg.fit(method="newton", disp=False)

    # Do a regularized fit with alpha, effectively dropping the last column
    alpha = 10 * len(rand_data.endog) * np.ones(cls.kvars)
    alpha[:cls.m] = 0
    cls.res_reg = sm.Poisson(rand_data.endog, rand_exog).fit_regularized(
        method='l1', alpha=alpha, disp=False, acc=1e-10,
        maxiter=2000, trim_mode='auto')
Example #10
Source File: test_glm.py From vnpy_crypto with MIT License
def setup_class(cls):
    '''
    Test Gaussian family with canonical identity link
    '''
    # Test Precisions
    cls.decimal_resids = DECIMAL_3
    cls.decimal_params = DECIMAL_2
    cls.decimal_bic = DECIMAL_0
    cls.decimal_bse = DECIMAL_3

    from statsmodels.datasets.longley import load
    cls.data = load()
    cls.data.exog = add_constant(cls.data.exog, prepend=False)
    params = sm.OLS(cls.data.endog, cls.data.exog).fit().params
    cls.res1 = GLM(cls.data.endog, cls.data.exog,
                   family=sm.families.Gaussian()).fit(start_params=params)

    from .results.results_glm import Longley
    cls.res2 = Longley()
Example #11
Source File: test_rlm.py From vnpy_crypto with MIT License
def setup_class(cls):
    from statsmodels.datasets.stackloss import load
    cls.data = load()
    cls.data.exog = sm.add_constant(cls.data.exog, prepend=False)
    results = RLM(cls.data.endog, cls.data.exog,
                  M=sm.robust.norms.HuberT()).fit(
                      scale_est=sm.robust.scale.HuberScale())
    h2 = RLM(cls.data.endog, cls.data.exog,
             M=sm.robust.norms.HuberT()).fit(
                 cov="H2",
                 scale_est=sm.robust.scale.HuberScale()).bcov_scaled
    h3 = RLM(cls.data.endog, cls.data.exog,
             M=sm.robust.norms.HuberT()).fit(
                 cov="H3",
                 scale_est=sm.robust.scale.HuberScale()).bcov_scaled
    cls.res1 = results
    cls.res1.h2 = h2
    cls.res1.h3 = h3
Example #12
Source File: test_rlm.py From vnpy_crypto with MIT License
def setup_class(cls):
    from statsmodels.datasets.stackloss import load
    cls.data = load()
    # class attributes for subclasses
    cls.data.exog = sm.add_constant(cls.data.exog, prepend=False)
    # Test precisions
    cls.decimal_standarderrors = DECIMAL_1
    cls.decimal_scale = DECIMAL_3

    results = RLM(cls.data.endog, cls.data.exog,
                  M=sm.robust.norms.HuberT()).fit(conv='sresid')  # default M
    h2 = RLM(cls.data.endog, cls.data.exog,
             M=sm.robust.norms.HuberT()).fit(cov="H2").bcov_scaled
    h3 = RLM(cls.data.endog, cls.data.exog,
             M=sm.robust.norms.HuberT()).fit(cov="H3").bcov_scaled
    cls.res1 = results
    cls.res1.h2 = h2
    cls.res1.h3 = h3
Example #13
Source File: factormodels.py From vnpy_crypto with MIT License
def calc_factors(self, x=None, keepdim=0, addconst=True):
    '''get factor decomposition of exogenous variables

    This uses principal component analysis to obtain the factors. The number
    of factors kept is the maximum that will be considered in the regression.
    '''
    if x is None:
        x = self.exog
    else:
        x = np.asarray(x)
    xred, fact, evals, evecs = pca(x, keepdim=keepdim, normalize=1)
    self.exog_reduced = xred
    #self.factors = fact
    if addconst:
        self.factors = sm.add_constant(fact, prepend=True)
        self.hasconst = 1  # needs to be int
    else:
        self.factors = fact
        self.hasconst = 0  # needs to be int

    self.evals = evals
    self.evecs = evecs
Example #14
Source File: wrappers.py From vnpy_crypto with MIT License
def linmod(y, x, weights=None, sigma=None, add_const=True,
           filter_missing=True, **kwds):
    '''get linear model with extra options for entry

    dispatches to regular model class and does not wrap the output

    If several options are exclusive, for example sigma and weights, then
    the chosen class depends on the implementation sequence.
    '''
    if filter_missing:
        y, x = remove_nanrows(y, x)
        # do the same for masked arrays

    if add_const:
        x = sm.add_constant(x, prepend=True)

    if sigma is not None:
        return GLS(y, x, sigma=sigma, **kwds)
    elif weights is not None:
        return WLS(y, x, weights=weights, **kwds)
    else:
        return OLS(y, x, **kwds)
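As a quick usage sketch of the linmod wrapper above (hedged: it assumes OLS and WLS are imported in its module as shown; filter_missing is disabled here so the remove_nanrows helper is not needed), the dispatch works like this:

import numpy as np

y = np.random.randn(50)
x = np.random.randn(50, 2)

# neither sigma nor weights given -> plain OLS, with a constant prepended
mod_ols = linmod(y, x, filter_missing=False)

# weights given -> WLS
mod_wls = linmod(y, x, weights=np.ones(50), filter_missing=False)

res = mod_ols.fit()
print(res.params)  # 3 parameters: the prepended constant plus 2 slopes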
Example #15
Source File: test_poisson.py From vnpy_crypto with MIT License
def setup_class(cls):
    # generate artificial data
    np.random.seed(98765678)
    nobs = 200
    rvs = np.random.randn(nobs, 6)
    data_exog = rvs
    data_exog = sm.add_constant(data_exog, prepend=False)
    xbeta = 0.1 + 0.1 * rvs.sum(1)
    data_endog = np.random.poisson(np.exp(xbeta))

    # estimate discretemod.Poisson as benchmark
    cls.res_discrete = Poisson(data_endog, data_exog).fit(disp=0)

    mod_glm = sm.GLM(data_endog, data_exog, family=sm.families.Poisson())
    cls.res_glm = mod_glm.fit()

    # estimate generic MLE
    cls.mod = PoissonGMLE(data_endog, data_exog)
    cls.res = cls.mod.fit(start_params=0.9 * cls.res_discrete.params,
                          method='bfgs', disp=0)
Example #16
Source File: test_discrete.py From vnpy_crypto with MIT License
def setup_class(cls):
    data = sm.datasets.spector.load()
    data.exog = sm.add_constant(data.exog, prepend=False)
    res2 = Spector()
    res2.probit()
    cls.res2 = res2

    # fmin_cg fails to converge on some machines - reparameterize
    from statsmodels.tools.transform_model import StandardizeTransform
    transf = StandardizeTransform(data.exog)
    exog_st = transf(data.exog)
    res1_st = Probit(data.endog, exog_st).fit(method="cg", disp=0,
                                              maxiter=1000, gtol=1e-08)
    start_params = transf.transform_params(res1_st.params)
    assert_allclose(start_params, res2.params, rtol=1e-5, atol=1e-6)

    cls.res1 = Probit(data.endog, data.exog).fit(start_params=start_params,
                                                 method="cg", maxiter=1000,
                                                 gtol=1e-05, disp=0)
    assert_array_less(cls.res1.mle_retvals['fcalls'], 100)
Example #17
Source File: test_gee.py From vnpy_crypto with MIT License
def test_stationary_grid(self):
    endog = np.r_[4, 2, 3, 1, 4, 5, 6, 7, 8, 3, 2, 4.]
    exog = np.r_[2, 3, 1, 4, 3, 2, 5, 4, 5, 6, 3, 2]
    group = np.r_[0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3]
    exog = sm.add_constant(exog)

    cs = Stationary(max_lag=2, grid=True)
    model = sm.GEE(endog, exog, group, cov_struct=cs)
    result = model.fit()
    se = result.bse * np.sqrt(12 / 9.)  # Stata adjustment

    assert_allclose(cs.covariance_matrix(np.r_[1, 1, 1], 0)[0].sum(),
                    6.4633538285149452)

    # Obtained from Stata using:
    # xtgee y x, i(g) vce(robust) corr(Stationary2)
    assert_allclose(result.params, np.r_[4.463968, -0.0386674],
                    rtol=1e-5, atol=1e-5)
    assert_allclose(se, np.r_[0.5217202, 0.2800333],
                    rtol=1e-5, atol=1e-5)
Example #18
Source File: test_count_model.py From vnpy_crypto with MIT License
def setup_class(cls):
    data = sm.datasets.randhie.load()
    cls.endog = data.endog
    exog = sm.add_constant(data.exog[:, 1:4], prepend=False)
    exog_infl = sm.add_constant(data.exog[:, 0], prepend=False)
    cls.res1 = sm.ZeroInflatedPoisson(
        data.endog, exog, exog_infl=exog_infl,
        offset=data.exog[:, 7]).fit(method='newton', maxiter=500, disp=0)
    # for llnull test
    cls.res1._results._attach_nullmodel = True
    cls.init_keys = ['exog_infl', 'exposure', 'inflation', 'offset']
    cls.init_kwds = {'inflation': 'logit'}
    res2 = RandHIE()
    res2.zero_inflated_poisson_offset()
    cls.res2 = res2
Example #19
Source File: test_count_model.py From vnpy_crypto with MIT License
def setup_class(cls):
    data = sm.datasets.randhie.load_pandas()
    cls.endog = data.endog
    cls.data = data
    exog = sm.add_constant(data.exog.iloc[:, 1:4], prepend=False)
    exog_infl = sm.add_constant(data.exog.iloc[:, 0], prepend=False)
    # we don't need to verify convergence here
    start_params = np.asarray([
        0.10337834587498942, -1.0459825102508549, -0.08219794475894268,
        0.00856917434709146, -0.026795737379474334, 1.4823632430107334])
    model = sm.ZeroInflatedPoisson(data.endog, exog,
                                   exog_infl=exog_infl, inflation='logit')
    cls.res1 = model.fit(start_params=start_params, method='newton',
                         maxiter=500, disp=0)
    # for llnull test
    cls.res1._results._attach_nullmodel = True
    cls.init_keys = ['exog_infl', 'exposure', 'inflation', 'offset']
    cls.init_kwds = {'inflation': 'logit'}
    res2 = RandHIE()
    res2.zero_inflated_poisson_logit()
    cls.res2 = res2
Example #20
Source File: test_discrete.py From vnpy_crypto with MIT License
def setup_class(cls):
    cls.kvars = 10  # Number of variables
    cls.m = 7  # Number of unregularized parameters
    rand_data = sm.datasets.randhie.load()
    rand_exog = rand_data.exog.view(float).reshape(len(rand_data.exog), -1)
    rand_exog_st = (rand_exog - rand_exog.mean(0)) / rand_exog.std(0)
    rand_exog = sm.add_constant(rand_exog_st, prepend=True)

    # Drop some columns and do an unregularized fit
    exog_no_PSI = rand_exog[:, :cls.m]
    mod_unreg = sm.NegativeBinomial(rand_data.endog, exog_no_PSI)
    cls.res_unreg = mod_unreg.fit(method="newton", disp=False)

    # Do a regularized fit with alpha, effectively dropping the last column
    alpha = 10 * len(rand_data.endog) * np.ones(cls.kvars + 1)
    alpha[:cls.m] = 0
    alpha[-1] = 0  # don't penalize alpha
    mod_reg = sm.NegativeBinomial(rand_data.endog, rand_exog)
    cls.res_reg = mod_reg.fit_regularized(
        method='l1', alpha=alpha, disp=False, acc=1e-10,
        maxiter=2000, trim_mode='auto')
    cls.k_extra = 1  # 1 extra parameter in nb2
Example #21
Source File: test_discrete.py From vnpy_crypto with MIT License
def setup_class(cls):
    data = sm.datasets.spector.load()
    data.exog = sm.add_constant(data.exog, prepend=False)
    res2 = Spector()
    res2.probit()
    cls.res2 = res2
    cls.res1 = Probit(data.endog, data.exog).fit(method="ncg", disp=0,
                                                 avextol=1e-8,
                                                 warn_convergence=False)
    # converges close enough but warnflag is 2 for precision loss
Example #22
Source File: test_discrete.py From vnpy_crypto with MIT License
def setup_class(cls):
    data = sm.datasets.spector.load()
    data.exog = sm.add_constant(data.exog, prepend=False)
    res2 = Spector()
    res2.probit()
    cls.res2 = res2
    cls.res1 = Probit(data.endog, data.exog).fit(method="nm", disp=0,
                                                 maxiter=500)
Example #23
Source File: test_discrete.py From vnpy_crypto with MIT License
def setup_class(cls):
    data = sm.datasets.spector.load()
    data.exog = sm.add_constant(data.exog, prepend=False)
    cls.res1 = Probit(data.endog, data.exog).fit(method="bfgs", disp=0)
    res2 = Spector()
    res2.probit()
    cls.res2 = res2
Example #24
Source File: test_discrete.py From vnpy_crypto with MIT License
def test_poisson_predict():
    # GH: 175, make sure poisson predict works without offset and exposure
    data = sm.datasets.randhie.load()
    exog = sm.add_constant(data.exog, prepend=True)
    res = sm.Poisson(data.endog, exog).fit(method='newton', disp=0)
    pred1 = res.predict()
    pred2 = res.predict(exog)
    assert_almost_equal(pred1, pred2)

    # extra options
    pred3 = res.predict(exog, offset=0, exposure=1)
    assert_almost_equal(pred1, pred3)
    pred3 = res.predict(exog, offset=0, exposure=2)
    assert_almost_equal(2 * pred1, pred3)
    pred3 = res.predict(exog, offset=np.log(2), exposure=1)
    assert_almost_equal(2 * pred1, pred3)
Example #25
Source File: test_discrete.py From vnpy_crypto with MIT License
def setup_class(cls):
    data = sm.datasets.anes96.load()
    cls.data = data
    exog = data.exog
    exog = sm.add_constant(exog, prepend=False)
    cls.res1 = MNLogit(data.endog, exog).fit(method="newton", disp=0)
    res2 = Anes()
    res2.mnlogit_basezero()
    cls.res2 = res2
Example #26
Source File: test_discrete.py From vnpy_crypto with MIT License
def setup_class(cls):
    data = sm.datasets.spector.load()
    data.exog = sm.add_constant(data.exog, prepend=False)
    res2 = Spector()
    res2.probit()
    cls.res2 = res2
    fit = Probit(data.endog, data.exog).fit
    cls.res1 = fit(method="minimize", disp=0, niter=5, tol=1e-8)
Example #27
Source File: test_discrete.py From vnpy_crypto with MIT License
def setup_class(cls):
    if not has_dogleg:
        raise SkipTest("Skipped TestProbitMinimizeDogleg since "
                       "dogleg method is not available")
    data = sm.datasets.spector.load()
    data.exog = sm.add_constant(data.exog, prepend=False)
    res2 = Spector()
    res2.probit()
    cls.res2 = res2
    fit = Probit(data.endog, data.exog).fit
    cls.res1 = fit(method="minimize", disp=0, niter=5, tol=1e-8,
                   min_method='dogleg')
Example #28
Source File: test_discrete.py From vnpy_crypto with MIT License
def setup_class(cls):
    data = sm.datasets.randhie.load()
    exog = sm.add_constant(data.exog, prepend=False)
    mod = NegativeBinomial(data.endog, exog, 'geometric')
    cls.res1 = mod.fit(method='bfgs', disp=0)
    res2 = RandHIE()
    res2.negativebinomial_geometric_bfgs()
    cls.res2 = res2

    # the following are regression tests, could be inherited instead
Example #29
Source File: test_discrete.py From vnpy_crypto with MIT License
def setup_class(cls):
    data = sm.datasets.randhie.load()
    exog = sm.add_constant(data.exog, prepend=False)
    cls.res1 = NegativeBinomial(data.endog, exog, 'nb1').fit(
        method="newton", maxiter=100, disp=0)
    res2 = RandHIE()
    res2.negativebinomial_nb1_bfgs()
    cls.res2 = res2
Example #30
Source File: test_discrete.py From vnpy_crypto with MIT License
def setup_class(cls):
    data = sm.datasets.randhie.load()
    exog = sm.add_constant(data.exog, prepend=False)
    cls.res1 = NegativeBinomial(data.endog, exog, 'nb1').fit(
        method="bfgs", maxiter=100, disp=0)
    res2 = RandHIE()
    res2.negativebinomial_nb1_bfgs()
    cls.res2 = res2