Python sklearn.linear_model.ElasticNetCV() Examples
The following are 13 code examples of sklearn.linear_model.ElasticNetCV(). You can go to the original project or source file by following the reference above each example. You may also want to check out all the other available functions and classes of the module sklearn.linear_model.
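Before the project examples, here is a minimal, self-contained sketch of typical ElasticNetCV usage for orientation; the synthetic dataset and the parameter grid below are illustrative choices, not taken from any of the projects that follow.

import numpy as np
from sklearn.datasets import make_regression
from sklearn.linear_model import ElasticNetCV

# Illustrative synthetic regression data; any (X, y) pair works here.
X, y = make_regression(n_samples=200, n_features=20, noise=0.5, random_state=0)

# Cross-validate jointly over an automatic alpha grid and a few l1_ratio values.
model = ElasticNetCV(l1_ratio=[0.1, 0.5, 0.9], n_alphas=50, cv=5)
model.fit(X, y)

print(model.alpha_)       # regularization strength selected by cross-validation
print(model.l1_ratio_)    # L1/L2 mixing parameter selected by cross-validation
print(model.score(X, y))  # R^2 of the fitted model on the training data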
Example #1
Source File: regression.py From Building-Machine-Learning-Systems-With-Python-Second-Edition with MIT License
def predict(train):
    binary = (train > 0)
    reg = ElasticNetCV(fit_intercept=True, alphas=[
        0.0125, 0.025, 0.05, .125, .25, .5, 1., 2., 4.])
    norm = NormalizePositive()
    train = norm.fit_transform(train)

    filled = train.copy()
    # iterate over all users
    for u in range(train.shape[0]):
        # remove the current user for training
        curtrain = np.delete(train, u, axis=0)
        bu = binary[u]
        if np.sum(bu) > 5:
            reg.fit(curtrain[:, bu].T, train[u, bu])

            # Fill the values that were not there already
            filled[u, ~bu] = reg.predict(curtrain[:, ~bu].T)
    return norm.inverse_transform(filled)
Example #2
Source File: ensemble_glm.py From jh-kaggle-util with Apache License 2.0
def fit_ensemble(x, y):
    fit_type = jhkaggle.jhkaggle_config['FIT_TYPE']
    if 1:
        if fit_type == jhkaggle.const.FIT_TYPE_BINARY_CLASSIFICATION:
            blend = SGDClassifier(loss="log", penalty="elasticnet")  # LogisticRegression()
        else:
            # blend = SGDRegressor()
            # blend = LinearRegression()
            # blend = RandomForestRegressor(n_estimators=10, n_jobs=-1, max_depth=5, criterion='mae')
            blend = LassoLarsCV(normalize=True)
            # blend = ElasticNetCV(normalize=True)
            # blend = LinearRegression(normalize=True)
        blend.fit(x, y)
    else:
        blend = LogisticRegression()
        blend.fit(x, y)
    return blend
Example #3
Source File: test_sklearn_glm_regressor_converter.py From sklearn-onnx with MIT License
def test_model_elastic_net_cv_regressor(self):
    model, X = fit_regression_model(linear_model.ElasticNetCV())
    model_onnx = convert_sklearn(
        model,
        "scikit-learn elastic-net regression",
        [("input", FloatTensorType([None, X.shape[1]]))],
    )
    self.assertIsNotNone(model_onnx)
    dump_data_and_model(
        X,
        model,
        model_onnx,
        basename="SklearnElasticNetCV-Dec4",
        allow_failure="StrictVersion("
        "onnxruntime.__version__)"
        "<= StrictVersion('0.2.1')",
    )
Example #4
Source File: Booster.py From Fast-and-Accurate-Least-Mean-Squares-Solvers with MIT License
def get_new_clf(solver, folds=3, alphas=100):
    kf = KFold(n_splits=folds, shuffle=False)
    if "linear" == solver:
        clf = linear_model.LinearRegression(fit_intercept=False)
    if "ridge" == solver:
        alphas = np.arange(1 / alphas, 10 + 1 / alphas, 10 / alphas)
        clf = linear_model.RidgeCV(alphas=alphas, fit_intercept=False, cv=kf)
    elif "lasso" == solver:
        clf = linear_model.LassoCV(n_alphas=alphas, fit_intercept=False, cv=kf)
    elif "elastic" == solver:
        clf = linear_model.ElasticNetCV(n_alphas=alphas, fit_intercept=False, cv=kf)
    return clf
Example #5
Source File: scikit_wrapper.py From CatLearn with GNU General Public License v3.0
def _elast(self, tol=1e-4):
    """Function to do elastic net regression."""
    regr = ElasticNetCV(fit_intercept=True, normalize=True,
                        max_iter=self.iter, tol=tol)
    model = regr.fit(X=self.train_matrix, y=self.train_target)
    coeff = regr.coef_

    # Make the linear prediction.
    pred = None
    if self.predict:
        data = model.predict(self.test_matrix)
        pred = get_error(prediction=data,
                         target=self.test_target)['average']

    return coeff, pred
Example #6
Source File: test_base.py From carl with BSD 3-Clause "New" or "Revised" License
def test_decomposed_ratio():
    components = [Normal(mu=0.0), Normal(mu=0.25), Normal(mu=0.5)]
    p0 = Mixture(components=components, weights=[0.45, 0.1, 0.45])
    p1 = Mixture(components=[components[0]] + [components[2]])

    ratio = DecomposedRatio(
        ClassifierRatio(CalibratedClassifierCV(base_estimator=ElasticNetCV())))
    ratio.fit(numerator=p0, denominator=p1, n_samples=10000)

    reals = np.linspace(-0.5, 1.0, num=100).reshape(-1, 1)
    assert ratio.score(reals, p0.pdf(reals) / p1.pdf(reals)) > -0.1
    assert np.mean(np.abs(np.log(ratio.predict(reals)) -
                          ratio.predict(reals, log=True))) < 0.01
Example #7
Source File: test_base.py From carl with BSD 3-Clause "New" or "Revised" License
def test_decomposed_ratio_identity():
    components = [Normal(mu=0.0), Normal(mu=0.25), Normal(mu=0.5)]
    p = Mixture(components=components, weights=[0.45, 0.1, 0.45])

    ratio = DecomposedRatio(
        ClassifierRatio(CalibratedClassifierCV(base_estimator=ElasticNetCV())))
    ratio.fit(numerator=p, denominator=p, n_samples=10000)

    reals = np.linspace(-0.5, 1.0, num=100).reshape(-1, 1)
    assert ratio.score(reals, p.pdf(reals) / p.pdf(reals)) == 0.0
    assert_array_almost_equal(ratio.predict(reals), np.ones(len(reals)))
    assert_array_almost_equal(ratio.predict(reals, log=True),
                              np.zeros(len(reals)))
Example #8
Source File: test_classifier.py From carl with BSD 3-Clause "New" or "Revised" License
def test_classifier_ratio():
    for clf, calibration, cv in [(ElasticNetCV(), "histogram", 3),
                                 (GaussianNB(), "kde", 3),
                                 (ElasticNetCV(), "isotonic", 3),
                                 (GaussianNB(), "sigmoid", 3)]:
        yield check_classifier_ratio, clf, calibration, cv
Example #9
Source File: test_classifier.py From carl with BSD 3-Clause "New" or "Revised" License
def test_classifier_ratio_identity():
    p = Normal(mu=0.0)

    ratio = ClassifierRatio(
        CalibratedClassifierCV(base_estimator=ElasticNetCV()))
    ratio.fit(numerator=p, denominator=p, n_samples=10000)

    reals = np.linspace(-0.5, 1.0, num=100).reshape(-1, 1)
    assert ratio.score(reals, p.pdf(reals) / p.pdf(reals)) == 0.0
    assert_array_almost_equal(ratio.predict(reals), np.ones(len(reals)))
    assert_array_almost_equal(ratio.predict(reals, log=True),
                              np.zeros(len(reals)))
Example #10
Source File: gd_poisoners.py From manip-ml with MIT License
def learn_model(self, x, y, clf, lam=None):
    if (lam is None and self.initlam != -1):
        lam = self.initlam
    if (clf is not None):
        if (lam is not None):
            # choose the regularization strength by cross-validation
            clf = linear_model.ElasticNetCV(max_iter=10000)
            clf.fit(x, y)
            lam = clf.alpha_
        clf = linear_model.ElasticNet(alpha=lam,
                                      max_iter=10000,
                                      warm_start=True)
    clf.fit(x, y)
    return clf, lam
Example #11
Source File: test_linear_model.py From pandas-ml with BSD 3-Clause "New" or "Revised" License
def test_objectmapper(self):
    df = pdml.ModelFrame([])
    self.assertIs(df.linear_model.ARDRegression, lm.ARDRegression)
    self.assertIs(df.linear_model.BayesianRidge, lm.BayesianRidge)
    self.assertIs(df.linear_model.ElasticNet, lm.ElasticNet)
    self.assertIs(df.linear_model.ElasticNetCV, lm.ElasticNetCV)
    self.assertIs(df.linear_model.HuberRegressor, lm.HuberRegressor)
    self.assertIs(df.linear_model.Lars, lm.Lars)
    self.assertIs(df.linear_model.LarsCV, lm.LarsCV)
    self.assertIs(df.linear_model.Lasso, lm.Lasso)
    self.assertIs(df.linear_model.LassoCV, lm.LassoCV)
    self.assertIs(df.linear_model.LassoLars, lm.LassoLars)
    self.assertIs(df.linear_model.LassoLarsCV, lm.LassoLarsCV)
    self.assertIs(df.linear_model.LassoLarsIC, lm.LassoLarsIC)
    self.assertIs(df.linear_model.LinearRegression, lm.LinearRegression)
    self.assertIs(df.linear_model.LogisticRegression, lm.LogisticRegression)
    self.assertIs(df.linear_model.LogisticRegressionCV, lm.LogisticRegressionCV)
    self.assertIs(df.linear_model.MultiTaskLasso, lm.MultiTaskLasso)
    self.assertIs(df.linear_model.MultiTaskElasticNet, lm.MultiTaskElasticNet)
    self.assertIs(df.linear_model.MultiTaskLassoCV, lm.MultiTaskLassoCV)
    self.assertIs(df.linear_model.MultiTaskElasticNetCV, lm.MultiTaskElasticNetCV)
    self.assertIs(df.linear_model.OrthogonalMatchingPursuit, lm.OrthogonalMatchingPursuit)
    self.assertIs(df.linear_model.OrthogonalMatchingPursuitCV, lm.OrthogonalMatchingPursuitCV)
    self.assertIs(df.linear_model.PassiveAggressiveClassifier, lm.PassiveAggressiveClassifier)
    self.assertIs(df.linear_model.PassiveAggressiveRegressor, lm.PassiveAggressiveRegressor)
    self.assertIs(df.linear_model.Perceptron, lm.Perceptron)
    self.assertIs(df.linear_model.RandomizedLasso, lm.RandomizedLasso)
    self.assertIs(df.linear_model.RandomizedLogisticRegression, lm.RandomizedLogisticRegression)
    self.assertIs(df.linear_model.RANSACRegressor, lm.RANSACRegressor)
    self.assertIs(df.linear_model.Ridge, lm.Ridge)
    self.assertIs(df.linear_model.RidgeClassifier, lm.RidgeClassifier)
    self.assertIs(df.linear_model.RidgeClassifierCV, lm.RidgeClassifierCV)
    self.assertIs(df.linear_model.RidgeCV, lm.RidgeCV)
    self.assertIs(df.linear_model.SGDClassifier, lm.SGDClassifier)
    self.assertIs(df.linear_model.SGDRegressor, lm.SGDRegressor)
    self.assertIs(df.linear_model.TheilSenRegressor, lm.TheilSenRegressor)
Example #12
Source File: beamformers_electrodes_tweak.py From mmvt with GNU General Public License v3.0
def calc_optimization_features(optimization_method, freqs_bins, cond, meg_data_dic,
                               elec_data, electrodes, from_t, to_t, optimization_params={}):
    # scorer = make_scorer(rol_corr, False)
    cv_parameters = []
    if optimization_method in ['Ridge', 'RidgeCV', 'Lasso', 'LassoCV', 'ElasticNet', 'ElasticNetCV']:
        # vstack all meg data, such that X.shape = T*n X F, where n is the electrodes num
        # Y is T*n * 1
        X = np.hstack((meg_data_dic[electrode][:, from_t:to_t] for electrode in electrodes))
        Y = np.hstack((elec_data[electrode][cond][from_t:to_t] for electrode in electrodes))
        funcs_dic = {'Ridge': Ridge(alpha=0.1),
                     'RidgeCV': RidgeCV(np.logspace(0, -10, 11)),  # scoring=scorer
                     'Lasso': Lasso(alpha=1.0 / X.shape[0]),
                     'LassoCV': LassoCV(alphas=np.logspace(0, -10, 11), max_iter=1000),
                     'ElasticNetCV': ElasticNetCV(alphas=np.logspace(0, -10, 11),
                                                  l1_ratio=np.linspace(0, 1, 11))}
        clf = funcs_dic[optimization_method]
        clf.fit(X.T, Y)
        p = clf.coef_
        if len(p) != len(freqs_bins):
            raise Exception('{} (len(clf.coef)) != {} (len(freqs_bin))!!!'.format(
                len(p), len(freqs_bins)))
        if optimization_method in ['RidgeCV', 'LassoCV']:
            cv_parameters = clf.alpha_
        elif optimization_method == 'ElasticNetCV':
            cv_parameters = [clf.alpha_, clf.l1_ratio_]
        args = [(meg_pred(p, meg_data_dic[electrode][:, from_t:to_t]),
                 elec_data[electrode][cond][from_t:to_t]) for electrode in electrodes]
        p0 = leastsq(post_ridge_err_func, [1], args=args, maxfev=0)[0]
        p = np.hstack((p0, p))
    elif optimization_method in ['leastsq', 'dtw', 'minmax', 'diff_rms', 'rol_corr']:
        args = ([(meg_data_dic[electrode][:, from_t:to_t],
                  elec_data[electrode][cond][from_t:to_t]) for electrode in electrodes],
                optimization_params)
        p0 = np.ones((1, len(freqs_bins) + 1))
        funcs_dic = {'leastsq': partial(leastsq, func=err_func, x0=p0, args=args),
                     'dtw': partial(minimize, fun=dtw_err_func, x0=p0, args=args),
                     'minmax': partial(minimize, fun=minmax_err_func, x0=p0, args=args),
                     'diff_rms': partial(minimize, fun=min_diff_rms_err_func, x0=p0, args=args),
                     'rol_corr': partial(minimize, fun=max_rol_corr, x0=p0, args=args)}
        res = funcs_dic[optimization_method]()
        p = res[0] if optimization_method == 'leastsq' else res.x
        cv_parameters = optimization_params
    else:
        raise Exception('Unknown optimization_method! {}'.format(optimization_method))
    return p, cv_parameters
Example #13
Source File: train.py From focus with GNU General Public License v3.0
def _train_enet(y, Z, X, include_ses=False, p_threshold=0.01):
    log = logging.getLogger(pyfocus.LOG)

    try:
        from limix.qc import normalise_covariance
        from sklearn.linear_model import ElasticNetCV
    except ImportError as ie:
        log.error("Training submodule requires limix>=2.0.0 and sklearn to be installed.")
        raise

    from scipy.linalg import lstsq

    log.debug("Initializing ElasticNet model")

    n = len(y)
    attrs = dict()

    K_cis = np.dot(Z, Z.T)
    K_cis = normalise_covariance(K_cis)
    fe_var, s2u, s2e, logl, fixed_betas, pval = _fit_cis_herit(y, K_cis, X)
    if pval > p_threshold:
        log.info("h2g pvalue {} greater than threshold {}. Skipping".format(pval, p_threshold))
        return None

    h2g = s2u / (s2u + s2e + fe_var)

    attrs["h2g"] = h2g
    attrs["h2g.logl"] = logl
    attrs["h2g.pvalue"] = pval

    # we only want to penalize SNP effects and not covariate effects...
    fixed_betas, sum_resid, ranks, svals = lstsq(X, y)
    yresid = y - np.dot(X, fixed_betas)

    enet = ElasticNetCV(l1_ratio=0.5, fit_intercept=True, cv=5)
    enet.fit(Z, yresid)
    betas = enet.coef_

    attrs["r2"] = enet.score(Z, yresid)
    attrs["resid.var"] = sum((yresid - enet.predict(Z)) ** 2) / (n - 1)

    if include_ses:
        # TODO: bootstrap?
        ses = None
    else:
        ses = None

    return betas, ses, attrs