Python sklearn.linear_model.ElasticNet() Examples
The following are 23 code examples of sklearn.linear_model.ElasticNet(), collected from open-source projects. The originating project and source file are noted above each example. You may also want to check out all the available functions and classes of the module sklearn.linear_model, or try the search function.
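
For orientation, here is a minimal, self-contained sketch of the estimator all of these examples revolve around. The data and the alpha/l1_ratio values are illustrative only:

import numpy as np
from sklearn.linear_model import ElasticNet

rng = np.random.RandomState(0)
X = rng.randn(100, 5)
y = X @ np.array([1.0, 0.5, 0.0, 0.0, -2.0]) + 0.01 * rng.randn(100)

# alpha sets the overall penalty strength; l1_ratio mixes the
# L1 (sparsity) and L2 (shrinkage) parts of the penalty.
model = ElasticNet(alpha=0.1, l1_ratio=0.5)
model.fit(X, y)
print(model.coef_, model.intercept_)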

Example #1
Source File: friedman_scores.py From mlens with MIT License
def build_ensemble(**kwargs):
    """Generate ensemble."""
    ens = SuperLearner(**kwargs)

    prep = {'Standard Scaling': [StandardScaler()],
            'Min Max Scaling': [MinMaxScaler()],
            'No Preprocessing': []}

    est = {'Standard Scaling': [ElasticNet(), Lasso(), KNeighborsRegressor()],
           'Min Max Scaling': [SVR()],
           'No Preprocessing': [RandomForestRegressor(random_state=SEED),
                                GradientBoostingRegressor()]}

    ens.add(est, prep)
    ens.add(GradientBoostingRegressor(), meta=True)
    return ens
Example #2
Source File: test_birch.py From twitter-stock-recommendation with MIT License
def test_n_clusters():
    # Test that n_clusters param works properly
    X, y = make_blobs(n_samples=100, centers=10)
    brc1 = Birch(n_clusters=10)
    brc1.fit(X)
    assert_greater(len(brc1.subcluster_centers_), 10)
    assert_equal(len(np.unique(brc1.labels_)), 10)

    # Test that n_clusters = Agglomerative Clustering gives
    # the same results.
    gc = AgglomerativeClustering(n_clusters=10)
    brc2 = Birch(n_clusters=gc)
    brc2.fit(X)
    assert_array_equal(brc1.subcluster_labels_, brc2.subcluster_labels_)
    assert_array_equal(brc1.labels_, brc2.labels_)

    # Test that the wrong global clustering step raises an Error.
    clf = ElasticNet()
    brc3 = Birch(n_clusters=clf)
    assert_raises(ValueError, brc3.fit, X)

    # Test that a small number of clusters raises a warning.
    brc4 = Birch(threshold=10000.)
    assert_warns(UserWarning, brc4.fit, X)
Example #3
Source File: test_sklearn_glm_regressor_converter.py From sklearn-onnx with MIT License
def test_model_elastic_net_regressor_bool(self):
    model, X = fit_regression_model(
        linear_model.ElasticNet(), is_bool=True)
    model_onnx = convert_sklearn(
        model, "elastic net regression",
        [("input", BooleanTensorType([None, X.shape[1]]))])
    self.assertIsNotNone(model_onnx)
    dump_data_and_model(
        X, model, model_onnx,
        basename="SklearnElasticNetRegressorBool",
        allow_failure="StrictVersion("
                      "onnxruntime.__version__)"
                      "<= StrictVersion('0.2.1')",
    )
Example #4
Source File: test_sklearn_glm_regressor_converter.py From sklearn-onnx with MIT License
def test_model_elastic_net_regressor(self):
    model, X = fit_regression_model(linear_model.ElasticNet())
    model_onnx = convert_sklearn(
        model, "scikit-learn elastic-net regression",
        [("input", FloatTensorType([None, X.shape[1]]))],
    )
    self.assertIsNotNone(model_onnx)
    dump_data_and_model(
        X, model, model_onnx,
        basename="SklearnElasticNet-Dec4",
        allow_failure="StrictVersion("
                      "onnxruntime.__version__)"
                      "<= StrictVersion('0.2.1')",
    )
Example #5
Source File: test_birch.py From Mastering-Elasticsearch-7.0 with MIT License
def test_n_clusters():
    # Test that n_clusters param works properly
    X, y = make_blobs(n_samples=100, centers=10)
    brc1 = Birch(n_clusters=10)
    brc1.fit(X)
    assert_greater(len(brc1.subcluster_centers_), 10)
    assert_equal(len(np.unique(brc1.labels_)), 10)

    # Test that n_clusters = Agglomerative Clustering gives
    # the same results.
    gc = AgglomerativeClustering(n_clusters=10)
    brc2 = Birch(n_clusters=gc)
    brc2.fit(X)
    assert_array_equal(brc1.subcluster_labels_, brc2.subcluster_labels_)
    assert_array_equal(brc1.labels_, brc2.labels_)

    # Test that the wrong global clustering step raises an Error.
    clf = ElasticNet()
    brc3 = Birch(n_clusters=clf)
    assert_raises(ValueError, brc3.fit, X)

    # Test that a small number of clusters raises a warning.
    brc4 = Birch(threshold=10000.)
    assert_warns(ConvergenceWarning, brc4.fit, X)
Example #6
Source File: __init__.py From sklearn2pmml with GNU Affero General Public License v3.0
def test_lm(self):
    _checkLM(ElasticNet())
    _checkLM(LinearRegression())
    _checkLM(SGDRegressor())
Example #7
Source File: TermDocMatrix.py From scattertext with Apache License 2.0
def get_regression_coefs(self, category, clf=ElasticNet()):
    '''Computes regression coefficients over tf-idf transformed features

    Parameters
    ----------
    category : str
        category name to score
    clf : sklearn regressor

    Returns
    -------
    coefficient array
    '''
    self._fit_tfidf_model(category, clf)
    return clf.coef_
Example #8
Source File: test_sgd.py From twitter-stock-recommendation with MIT License
def test_elasticnet_convergence(self):
    # Check that the SGD output is consistent with coordinate descent
    n_samples, n_features = 1000, 5
    rng = np.random.RandomState(0)
    X = rng.randn(n_samples, n_features)

    # ground-truth linear model that generates y from X, and to which the
    # models should converge if the regularizer were set to 0.0
    ground_truth_coef = rng.randn(n_features)
    y = np.dot(X, ground_truth_coef)

    # XXX: alpha = 0.1 seems to cause convergence problems
    for alpha in [0.01, 0.001]:
        for l1_ratio in [0.5, 0.8, 1.0]:
            cd = linear_model.ElasticNet(alpha=alpha, l1_ratio=l1_ratio,
                                         fit_intercept=False)
            cd.fit(X, y)
            sgd = self.factory(penalty='elasticnet', max_iter=50,
                               alpha=alpha, l1_ratio=l1_ratio,
                               fit_intercept=False)
            sgd.fit(X, y)
            err_msg = ("cd and sgd did not converge to comparable "
                       "results for alpha=%f and l1_ratio=%f"
                       % (alpha, l1_ratio))
            assert_almost_equal(cd.coef_, sgd.coef_, decimal=2,
                                err_msg=err_msg)
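
Both estimators in this test minimize essentially the same elastic-net objective, (1 / (2 * n_samples)) * ||y - Xw||^2 + alpha * l1_ratio * ||w||_1 + 0.5 * alpha * (1 - l1_ratio) * ||w||_2^2, which is why their coefficients are expected to agree. A minimal standalone sketch of the same comparison, with illustrative values:

import numpy as np
from sklearn.linear_model import ElasticNet, SGDRegressor

rng = np.random.RandomState(0)
X = rng.randn(200, 5)
y = X @ rng.randn(5)  # noiseless target, so both should recover it

cd = ElasticNet(alpha=0.001, l1_ratio=0.5, fit_intercept=False).fit(X, y)
sgd = SGDRegressor(penalty='elasticnet', alpha=0.001, l1_ratio=0.5,
                   fit_intercept=False, max_iter=1000, tol=1e-6).fit(X, y)
print(np.round(cd.coef_, 2))
print(np.round(sgd.coef_, 2))  # should be close to cd.coef_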
Example #9
Source File: poiRegression.py From python-urbanPlanning with MIT License
def regularization_m(X_re, y_re, predFeat=False):
    n_alphas = 200
    alphas = np.logspace(1, 8, n_alphas)
    coefs = []
    n = 0
    for a in alphas:
        n += 1
        ridge = Ridge(alpha=a, fit_intercept=False)
        ridge.fit(X_re, y_re)
        coefs.append(ridge.coef_)
        # print(n, coefs)

    ax = plt.gca()
    ax.plot(alphas, coefs)
    ax.set_xscale('log')
    ax.set_xlim(ax.get_xlim()[::-1])  # reverse axis
    plt.xlabel('alpha')
    plt.ylabel('weights')
    plt.title('Ridge coefficients as a function of the regularization')
    plt.axis('tight')
    plt.show()

    ridge = Ridge(alpha=28.6)  # Ridge with a pre-chosen alpha value
    ridge.fit(X_re, y_re)
    print(ridge.coef_, ridge.intercept_, ridge.alpha)

    redgecv = RidgeCV(alphas=alphas)  # pass multiple alpha values; the model picks the best one
    redgecv.fit(X_re, y_re)
    print(redgecv.coef_, redgecv.intercept_, redgecv.alpha_)

    lasso = Lasso(alpha=0.01)
    lasso.fit(X_re, y_re)
    print(lasso.coef_, lasso.intercept_, lasso.alpha)

    elasticnet = ElasticNet(alpha=1.0, l1_ratio=0.5)
    elasticnet.fit(X_re, y_re)
    print(elasticnet.coef_, elasticnet.intercept_, elasticnet.alpha)

    if type(predFeat).__module__ == 'numpy':
        return redgecv.predict(predFeat)
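
As an aside, scikit-learn can trace a whole elastic-net coefficient path in a single call, which would replace the manual alpha loop the function above uses for Ridge. A minimal sketch on toy data:

import numpy as np
from sklearn.linear_model import enet_path

rng = np.random.RandomState(0)
X, y = rng.randn(50, 4), rng.randn(50)

# alphas: the regularization grid; coefs: one coefficient vector per alpha
alphas, coefs, _ = enet_path(X, y, l1_ratio=0.5, n_alphas=100)
print(alphas.shape, coefs.shape)  # (100,) and (4, 100)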
Example #10
Source File: test_linear_model.py From pandas-ml with BSD 3-Clause "New" or "Revised" License
def test_objectmapper(self):
    df = pdml.ModelFrame([])
    self.assertIs(df.linear_model.ARDRegression, lm.ARDRegression)
    self.assertIs(df.linear_model.BayesianRidge, lm.BayesianRidge)
    self.assertIs(df.linear_model.ElasticNet, lm.ElasticNet)
    self.assertIs(df.linear_model.ElasticNetCV, lm.ElasticNetCV)
    self.assertIs(df.linear_model.HuberRegressor, lm.HuberRegressor)
    self.assertIs(df.linear_model.Lars, lm.Lars)
    self.assertIs(df.linear_model.LarsCV, lm.LarsCV)
    self.assertIs(df.linear_model.Lasso, lm.Lasso)
    self.assertIs(df.linear_model.LassoCV, lm.LassoCV)
    self.assertIs(df.linear_model.LassoLars, lm.LassoLars)
    self.assertIs(df.linear_model.LassoLarsCV, lm.LassoLarsCV)
    self.assertIs(df.linear_model.LassoLarsIC, lm.LassoLarsIC)
    self.assertIs(df.linear_model.LinearRegression, lm.LinearRegression)
    self.assertIs(df.linear_model.LogisticRegression, lm.LogisticRegression)
    self.assertIs(df.linear_model.LogisticRegressionCV, lm.LogisticRegressionCV)
    self.assertIs(df.linear_model.MultiTaskLasso, lm.MultiTaskLasso)
    self.assertIs(df.linear_model.MultiTaskElasticNet, lm.MultiTaskElasticNet)
    self.assertIs(df.linear_model.MultiTaskLassoCV, lm.MultiTaskLassoCV)
    self.assertIs(df.linear_model.MultiTaskElasticNetCV, lm.MultiTaskElasticNetCV)
    self.assertIs(df.linear_model.OrthogonalMatchingPursuit, lm.OrthogonalMatchingPursuit)
    self.assertIs(df.linear_model.OrthogonalMatchingPursuitCV, lm.OrthogonalMatchingPursuitCV)
    self.assertIs(df.linear_model.PassiveAggressiveClassifier, lm.PassiveAggressiveClassifier)
    self.assertIs(df.linear_model.PassiveAggressiveRegressor, lm.PassiveAggressiveRegressor)
    self.assertIs(df.linear_model.Perceptron, lm.Perceptron)
    self.assertIs(df.linear_model.RandomizedLasso, lm.RandomizedLasso)
    self.assertIs(df.linear_model.RandomizedLogisticRegression, lm.RandomizedLogisticRegression)
    self.assertIs(df.linear_model.RANSACRegressor, lm.RANSACRegressor)
    self.assertIs(df.linear_model.Ridge, lm.Ridge)
    self.assertIs(df.linear_model.RidgeClassifier, lm.RidgeClassifier)
    self.assertIs(df.linear_model.RidgeClassifierCV, lm.RidgeClassifierCV)
    self.assertIs(df.linear_model.RidgeCV, lm.RidgeCV)
    self.assertIs(df.linear_model.SGDClassifier, lm.SGDClassifier)
    self.assertIs(df.linear_model.SGDRegressor, lm.SGDRegressor)
    self.assertIs(df.linear_model.TheilSenRegressor, lm.TheilSenRegressor)
Example #11
Source File: friedman_memory.py From mlens with MIT License
def build_ensemble(**kwargs):
    """Generate ensemble."""
    ens = SuperLearner(**kwargs)
    # copy_X=False avoids copying the input data (this is a memory benchmark)
    est = [ElasticNet(copy_X=False), Lasso(copy_X=False)]
    ens.add(est)
    ens.add(KNeighborsRegressor())
    return ens
Example #12
Source File: test_sklearn_glm_regressor_converter.py From sklearn-onnx with MIT License
def test_model_elastic_net_regressor_int(self):
    model, X = fit_regression_model(linear_model.ElasticNet(), is_int=True)
    model_onnx = convert_sklearn(
        model, "elastic net regression",
        [("input", Int64TensorType([None, X.shape[1]]))])
    self.assertIsNotNone(model_onnx)
    dump_data_and_model(
        X, model, model_onnx,
        basename="SklearnElasticNetRegressorInt-Dec4",
        allow_failure="StrictVersion("
                      "onnxruntime.__version__)"
                      "<= StrictVersion('0.2.1')",
    )
Example #13
Source File: test_sgd.py From Mastering-Elasticsearch-7.0 with MIT License
def test_elasticnet_convergence(klass):
    # Check that the SGD output is consistent with coordinate descent
    n_samples, n_features = 1000, 5
    rng = np.random.RandomState(0)
    X = rng.randn(n_samples, n_features)

    # ground-truth linear model that generates y from X, and to which the
    # models should converge if the regularizer were set to 0.0
    ground_truth_coef = rng.randn(n_features)
    y = np.dot(X, ground_truth_coef)

    # XXX: alpha = 0.1 seems to cause convergence problems
    for alpha in [0.01, 0.001]:
        for l1_ratio in [0.5, 0.8, 1.0]:
            cd = linear_model.ElasticNet(alpha=alpha, l1_ratio=l1_ratio,
                                         fit_intercept=False)
            cd.fit(X, y)
            sgd = klass(penalty='elasticnet', max_iter=50,
                        alpha=alpha, l1_ratio=l1_ratio,
                        fit_intercept=False)
            sgd.fit(X, y)
            err_msg = ("cd and sgd did not converge to comparable "
                       "results for alpha=%f and l1_ratio=%f"
                       % (alpha, l1_ratio))
            assert_almost_equal(cd.coef_, sgd.coef_, decimal=2,
                                err_msg=err_msg)
Example #14
Source File: test_doublyrobust.py From causallib with Apache License 2.0
def ensure_many_models(self):
    from sklearn.ensemble import GradientBoostingRegressor, RandomForestRegressor
    from sklearn.neural_network import MLPRegressor
    from sklearn.linear_model import ElasticNet, RANSACRegressor, HuberRegressor, PassiveAggressiveRegressor
    from sklearn.neighbors import KNeighborsRegressor
    from sklearn.svm import SVR, LinearSVR
    from sklearn.ensemble import GradientBoostingClassifier, RandomForestClassifier
    from sklearn.neural_network import MLPClassifier
    from sklearn.neighbors import KNeighborsClassifier

    from sklearn.exceptions import ConvergenceWarning
    warnings.filterwarnings('ignore', category=ConvergenceWarning)

    data = self.create_uninformative_ox_dataset()
    for propensity_learner in [GradientBoostingClassifier(n_estimators=10),
                               RandomForestClassifier(n_estimators=100),
                               MLPClassifier(hidden_layer_sizes=(5,)),
                               KNeighborsClassifier(n_neighbors=20)]:
        weight_model = IPW(propensity_learner)
        propensity_learner_name = str(propensity_learner).split("(", maxsplit=1)[0]
        for outcome_learner in [GradientBoostingRegressor(n_estimators=10),
                                RandomForestRegressor(n_estimators=10),
                                MLPRegressor(hidden_layer_sizes=(5,)),
                                ElasticNet(), RANSACRegressor(), HuberRegressor(),
                                PassiveAggressiveRegressor(), KNeighborsRegressor(),
                                SVR(), LinearSVR()]:
            outcome_learner_name = str(outcome_learner).split("(", maxsplit=1)[0]
            outcome_model = Standardization(outcome_learner)
            with self.subTest("Test fit & predict using {} & {}".format(propensity_learner_name,
                                                                        outcome_learner_name)):
                model = self.estimator.__class__(outcome_model, weight_model)
                model.fit(data["X"], data["a"], data["y"], refit_weight_model=False)
                model.estimate_individual_outcome(data["X"], data["a"])
                self.assertTrue(True)  # Fit did not crash
Example #15
Source File: test_codec.py From Splunking-Crime with GNU Affero General Public License v3.0
def test_ElasticNet(self):
    ElasticNetAlgo.register_codecs()
    self.regressor_util(ElasticNet)
Example #16
Source File: ElasticNet.py From Splunking-Crime with GNU Affero General Public License v3.0
def __init__(self, options):
    self.handle_options(options)

    out_params = convert_params(
        options.get('params', {}),
        bools=['fit_intercept', 'normalize'],
        floats=['alpha', 'l1_ratio'],
    )

    if 'l1_ratio' in out_params:
        if out_params['l1_ratio'] < 0 or out_params['l1_ratio'] > 1:
            raise RuntimeError('l1_ratio must be >= 0 and <= 1')

    self.estimator = _ElasticNet(**out_params)
Example #17
Source File: gd_poisoners.py From manip-ml with MIT License
def learn_model(self, x, y, clf, lam=None):
    if lam is None and self.initlam != -1:
        lam = self.initlam
    # Build a model only when none is supplied; note the "is None" checks
    # (the flattened original read "is not None", which would overwrite a
    # supplied model and crash when clf was None).
    if clf is None:
        if lam is None:
            # pick the regularization strength by cross-validation first
            clf = linear_model.ElasticNetCV(max_iter=10000)
            clf.fit(x, y)
            lam = clf.alpha_
        clf = linear_model.ElasticNet(alpha=lam,
                                      max_iter=10000,
                                      warm_start=True)
    clf.fit(x, y)
    return clf, lam
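
The pattern above (cross-validate once to pick the regularization strength, then refit a plain ElasticNet with warm_start=True so that later .fit() calls start from the previous coefficients) can be sketched standalone, with toy data and illustrative names:

import numpy as np
from sklearn.linear_model import ElasticNet, ElasticNetCV

rng = np.random.RandomState(0)
X, y = rng.randn(100, 8), rng.randn(100)

# Choose alpha once by cross-validation...
cv = ElasticNetCV(max_iter=10000).fit(X, y)

# ...then reuse it; warm_start=True speeds up repeated refits on
# slightly perturbed data, as in the poisoning loop this comes from.
clf = ElasticNet(alpha=cv.alpha_, max_iter=10000, warm_start=True)
clf.fit(X, y)
print(clf.coef_)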
Example #18
Source File: test_pyglmnet.py From pyglmnet with MIT License
def test_compare_sklearn(solver):
    """Test results against sklearn."""
    def rmse(a, b):
        return np.sqrt(np.mean((a - b) ** 2))

    X, Y, coef_ = make_regression(
        n_samples=1000, n_features=1000, noise=0.1,
        n_informative=10, coef=True, random_state=42)

    alpha = 0.1
    l1_ratio = 0.5
    clf = ElasticNet(alpha=alpha, l1_ratio=l1_ratio, tol=1e-5)
    clf.fit(X, Y)

    # Note the swapped naming: pyglmnet's reg_lambda is sklearn's alpha
    # (overall strength), and pyglmnet's alpha is sklearn's l1_ratio (L1 mix).
    glm = GLM(distr='gaussian', alpha=l1_ratio,
              reg_lambda=alpha, solver=solver,
              tol=1e-5, max_iter=70)
    glm.fit(X, Y)

    y_sk = clf.predict(X)
    y_pg = glm.predict(X)
    assert abs(rmse(Y, y_sk) - rmse(Y, y_pg)) < 1.0

    glm = GLM(distr='gaussian', alpha=l1_ratio,
              reg_lambda=alpha, solver=solver,
              tol=1e-5, max_iter=5, fit_intercept=False)
    glm.fit(X, Y)
    assert glm.beta0_ == 0.
    glm.predict(X)
Example #19
Source File: scikitlearn.py From sia-cog with MIT License
def getModels():
    result = []
    result.append("LinearRegression")
    result.append("BayesianRidge")
    result.append("ARDRegression")
    result.append("ElasticNet")
    result.append("HuberRegressor")
    result.append("Lasso")
    result.append("LassoLars")
    result.append("Rigid")  # sic: the repo's misspelled key for Ridge (see getSKLearnModel below)
    result.append("SGDRegressor")
    result.append("SVR")
    result.append("MLPClassifier")
    result.append("KNeighborsClassifier")
    result.append("SVC")
    result.append("GaussianProcessClassifier")
    result.append("DecisionTreeClassifier")
    result.append("RandomForestClassifier")
    result.append("AdaBoostClassifier")
    result.append("GaussianNB")
    result.append("LogisticRegression")
    result.append("QuadraticDiscriminantAnalysis")
    return result
Example #20
Source File: test_doublyrobust.py From causallib with Apache License 2.0
def test_many_models(self):
    from sklearn.ensemble import GradientBoostingRegressor, RandomForestRegressor
    from sklearn.neural_network import MLPRegressor
    from sklearn.linear_model import ElasticNet, RANSACRegressor, HuberRegressor, PassiveAggressiveRegressor
    from sklearn.neighbors import KNeighborsRegressor
    from sklearn.svm import SVR, LinearSVR
    from sklearn.ensemble import GradientBoostingClassifier, RandomForestClassifier
    from sklearn.neural_network import MLPClassifier
    from sklearn.neighbors import KNeighborsClassifier

    from sklearn.exceptions import ConvergenceWarning
    warnings.filterwarnings('ignore', category=ConvergenceWarning)

    data = self.create_uninformative_ox_dataset()

    for propensity_learner in [GradientBoostingClassifier(n_estimators=10),
                               RandomForestClassifier(n_estimators=100),
                               MLPClassifier(hidden_layer_sizes=(5,)),
                               KNeighborsClassifier(n_neighbors=20)]:
        weight_model = IPW(propensity_learner)
        propensity_learner_name = str(propensity_learner).split("(", maxsplit=1)[0]
        for outcome_learner in [GradientBoostingRegressor(n_estimators=10),
                                RandomForestRegressor(n_estimators=10),
                                RANSACRegressor(), HuberRegressor(),
                                SVR(), LinearSVR()]:
            outcome_learner_name = str(outcome_learner).split("(", maxsplit=1)[0]
            outcome_model = Standardization(outcome_learner)
            with self.subTest("Test fit using {} & {}".format(propensity_learner_name,
                                                              outcome_learner_name)):
                model = self.estimator.__class__(outcome_model, weight_model)
                model.fit(data["X"], data["a"], data["y"], refit_weight_model=False)
                self.assertTrue(True)  # Fit did not crash

        for outcome_learner in [MLPRegressor(hidden_layer_sizes=(5,)),
                                ElasticNet(), PassiveAggressiveRegressor(),
                                KNeighborsRegressor()]:
            outcome_learner_name = str(outcome_learner).split("(", maxsplit=1)[0]
            outcome_model = Standardization(outcome_learner)
            with self.subTest("Test fit using {} & {}".format(propensity_learner_name,
                                                              outcome_learner_name)):
                model = self.estimator.__class__(outcome_model, weight_model)
                with self.assertRaises(TypeError):
                    # Joffe forces learning with sample_weights,
                    # not all ML models support that and so calling should fail
                    model.fit(data["X"], data["a"], data["y"], refit_weight_model=False)
Example #21
Source File: expected_rank_regression.py From cs-ranking with Apache License 2.0
def fit(self, X, Y, **kwargs):
    """
    Fit an ExpectedRankRegression on the provided set of queries X and
    preferences Y of those objects. The provided queries and corresponding
    preferences are of a fixed size (numpy arrays).

    Parameters
    ----------
    X : numpy array (n_instances, n_objects, n_features)
        Feature vectors of the objects
    Y : numpy array (n_instances, n_objects)
        Rankings of the given objects
    **kwargs
        Keyword arguments for the fit function
    """
    self.random_state_ = check_random_state(self.random_state)
    self.logger.debug("Creating the Dataset")
    x_train, y_train = complete_linear_regression_dataset(X, Y)
    self.logger.debug("Finished the Dataset")

    if self.alpha < 1e-3:
        self.model = LinearRegression(
            normalize=self.normalize, fit_intercept=self.fit_intercept
        )
        self.logger.info("LinearRegression")
    else:
        if self.l1_ratio >= 0.01:
            self.model = ElasticNet(
                alpha=self.alpha,
                l1_ratio=self.l1_ratio,
                normalize=self.normalize,
                tol=self.tol,
                fit_intercept=self.fit_intercept,
                random_state=self.random_state_,
            )
            self.logger.info("Elastic Net")
        else:
            self.model = Ridge(
                alpha=self.alpha,
                normalize=self.normalize,
                tol=self.tol,
                fit_intercept=self.fit_intercept,
                random_state=self.random_state_,
            )
            self.logger.info("Ridge")

    self.logger.debug("Finished Creating the model, now fitting started")
    self.model.fit(x_train, y_train)
    self.weights = self.model.coef_.flatten()
    if self.fit_intercept:
        self.weights = np.append(self.weights, self.model.intercept_)
    self.logger.debug("Fitting Complete")
Example #22
Source File: train_linear.py From mlflow-apps with Apache License 2.0
def train(training_pandas_data, test_pandas_data, label_col,
          feat_cols, alpha, l1_ratio, training_data_path, test_data_path):

    print("train: " + training_data_path)
    print("test: " + test_data_path)
    print("alpha: ", alpha)
    print("l1-ratio: ", l1_ratio)
    print("label-col: " + label_col)
    for col in feat_cols:
        print("feat-cols: " + col)

    # Split data into training labels and testing labels.
    trainingLabels = training_pandas_data[label_col].values
    trainingFeatures = training_pandas_data[feat_cols].values
    testLabels = test_pandas_data[label_col].values
    testFeatures = test_pandas_data[feat_cols].values

    # We will use a linear Elastic Net model.
    en = ElasticNet(alpha=alpha, l1_ratio=l1_ratio)

    # Here we train the model.
    en.fit(trainingFeatures, trainingLabels)

    # Calculating the scores of the model.
    test_rmse = mean_squared_error(testLabels, en.predict(testFeatures))**0.5
    r2_score_training = en.score(trainingFeatures, trainingLabels)
    r2_score_test = en.score(testFeatures, testLabels)
    print("Test RMSE:", test_rmse)
    print("Training set score:", r2_score_training)
    print("Test set score:", r2_score_test)

    # Logging the RMSE and r2 scores.
    mlflow.log_metric("Test RMSE", test_rmse)
    mlflow.log_metric("Train R2", r2_score_training)
    mlflow.log_metric("Test R2", r2_score_test)

    # Saving the model as an artifact.
    sklearn.log_model(en, "model")

    run_id = mlflow.active_run().info.run_uuid
    print("Run with id %s finished" % run_id)
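
A hypothetical invocation of the function above (the CSV paths and column names are made up for illustration, and an active MLflow run is assumed):

import pandas as pd

training_df = pd.read_csv("train.csv")
test_df = pd.read_csv("test.csv")
train(training_df, test_df, label_col="price",
      feat_cols=["sqft", "rooms"], alpha=0.5, l1_ratio=0.5,
      training_data_path="train.csv", test_data_path="test.csv")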
Example #23
Source File: scikitlearn.py From sia-cog with MIT License
def getSKLearnModel(modelName):
    if modelName == 'LinearRegression':
        model = linear_model.LinearRegression()
    elif modelName == 'BayesianRidge':
        model = linear_model.BayesianRidge()
    elif modelName == 'ARDRegression':
        model = linear_model.ARDRegression()
    elif modelName == 'ElasticNet':
        model = linear_model.ElasticNet()
    elif modelName == 'HuberRegressor':
        model = linear_model.HuberRegressor()
    elif modelName == 'Lasso':
        model = linear_model.Lasso()
    elif modelName == 'LassoLars':
        model = linear_model.LassoLars()
    elif modelName == 'Rigid':  # sic: matches the misspelled key in getModels() above
        model = linear_model.Ridge()
    elif modelName == 'SGDRegressor':
        model = linear_model.SGDRegressor()
    elif modelName == 'SVR':
        model = SVR()
    elif modelName == 'MLPClassifier':
        model = MLPClassifier()
    elif modelName == 'KNeighborsClassifier':
        model = KNeighborsClassifier()
    elif modelName == 'SVC':
        model = SVC()
    elif modelName == 'GaussianProcessClassifier':
        model = GaussianProcessClassifier()
    elif modelName == 'DecisionTreeClassifier':
        model = DecisionTreeClassifier()
    elif modelName == 'RandomForestClassifier':
        model = RandomForestClassifier()
    elif modelName == 'AdaBoostClassifier':
        model = AdaBoostClassifier()
    elif modelName == 'GaussianNB':
        model = GaussianNB()
    elif modelName == 'LogisticRegression':
        model = linear_model.LogisticRegression()
    elif modelName == 'QuadraticDiscriminantAnalysis':
        model = QuadraticDiscriminantAnalysis()
    return model