Python sklearn.linear_model.SGDRegressor() Examples
The following are 30 code examples of sklearn.linear_model.SGDRegressor().
You may also want to check out all available functions/classes of the module sklearn.linear_model.
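
Before the project examples, a minimal, self-contained sketch of typical SGDRegressor usage may help. Everything here (the synthetic data, the pipeline, the hyperparameters) is illustrative and not drawn from any example below; SGD is sensitive to feature scale, so the estimator is usually paired with a scaler.

# Minimal illustrative sketch -- not taken from any project example below.
from sklearn.datasets import make_regression
from sklearn.linear_model import SGDRegressor
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

X, y = make_regression(n_samples=200, n_features=10, noise=5.0, random_state=0)
# StandardScaler first: SGD converges poorly on unscaled features.
model = make_pipeline(StandardScaler(),
                      SGDRegressor(max_iter=1000, tol=1e-3, random_state=0))
model.fit(X, y)
print(model.predict(X[:3]))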
Example #1
Source File: test_multioutput.py From Mastering-Elasticsearch-7.0 with MIT License
def test_multi_target_regression_partial_fit():
    X, y = datasets.make_regression(n_targets=3)
    X_train, y_train = X[:50], y[:50]
    X_test, y_test = X[50:], y[50:]

    references = np.zeros_like(y_test)
    half_index = 25
    for n in range(3):
        sgr = SGDRegressor(random_state=0, max_iter=5)
        sgr.partial_fit(X_train[:half_index], y_train[:half_index, n])
        sgr.partial_fit(X_train[half_index:], y_train[half_index:, n])
        references[:, n] = sgr.predict(X_test)

    sgr = MultiOutputRegressor(SGDRegressor(random_state=0, max_iter=5))
    sgr.partial_fit(X_train[:half_index], y_train[:half_index])
    sgr.partial_fit(X_train[half_index:], y_train[half_index:])
    y_pred = sgr.predict(X_test)
    assert_almost_equal(references, y_pred)
    assert not hasattr(MultiOutputRegressor(Lasso), 'partial_fit')
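
The test relies on partial_fit, which updates the model one mini-batch at a time instead of refitting from scratch. As an aside, here is a sketch of the same pattern used for out-of-core learning on streamed chunks; the data and chunking are synthetic and not part of the test:

# Illustrative out-of-core loop -- synthetic chunks, not from the test above.
import numpy as np
from sklearn.linear_model import SGDRegressor

rng = np.random.RandomState(0)
sgr = SGDRegressor(random_state=0)
for _ in range(10):                    # pretend each iteration reads a new chunk
    X_chunk = rng.randn(32, 5)
    y_chunk = X_chunk @ np.arange(1.0, 6.0)
    sgr.partial_fit(X_chunk, y_chunk)  # incremental update, constant memory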
Example #2
Source File: test_robust_weighted_estimator.py From scikit-learn-extra with BSD 3-Clause "New" or "Revised" License
def test_not_robust_regression(loss, weighting):
    clf = RobustWeightedEstimator(
        SGDRegressor(),
        loss=loss,
        max_iter=100,
        weighting=weighting,
        k=0,
        c=1e7,
        burn_in=0,
        random_state=rng,
    )
    clf_not_rob = SGDRegressor(loss=loss, random_state=rng)
    clf.fit(X_r, y_r)
    clf_not_rob.fit(X_r, y_r)
    pred1 = clf.predict(X_r)
    pred2 = clf_not_rob.predict(X_r)
    assert np.linalg.norm(pred1 - pred2) / np.linalg.norm(
        pred2
    ) < np.linalg.norm(pred1 - y_r) / np.linalg.norm(y_r)
Example #3
Source File: test_sklearn_bagging_converter.py From sklearn-onnx with MIT License
def test_bagging_regressor_sgd(self):
    model, X = fit_regression_model(
        BaggingRegressor(SGDRegressor()))
    model_onnx = convert_sklearn(
        model,
        "bagging regressor",
        [("input", FloatTensorType([None, X.shape[1]]))],
        dtype=np.float32,
    )
    self.assertIsNotNone(model_onnx)
    dump_data_and_model(
        X,
        model,
        model_onnx,
        basename="SklearnBaggingRegressorSGD-Dec4",
        allow_failure="StrictVersion(onnxruntime.__version__)"
                      "<= StrictVersion('0.2.1')",
    )
Example #4
Source File: test_multioutput.py From twitter-stock-recommendation with MIT License
def test_multi_target_regression_partial_fit():
    X, y = datasets.make_regression(n_targets=3)
    X_train, y_train = X[:50], y[:50]
    X_test, y_test = X[50:], y[50:]

    references = np.zeros_like(y_test)
    half_index = 25
    for n in range(3):
        sgr = SGDRegressor(random_state=0, max_iter=5)
        sgr.partial_fit(X_train[:half_index], y_train[:half_index, n])
        sgr.partial_fit(X_train[half_index:], y_train[half_index:, n])
        references[:, n] = sgr.predict(X_test)

    sgr = MultiOutputRegressor(SGDRegressor(random_state=0, max_iter=5))
    sgr.partial_fit(X_train[:half_index], y_train[:half_index])
    sgr.partial_fit(X_train[half_index:], y_train[half_index:])
    y_pred = sgr.predict(X_test)
    assert_almost_equal(references, y_pred)
    assert_false(hasattr(MultiOutputRegressor(Lasso), 'partial_fit'))
Example #5
Source File: estimator.py From EDeN with MIT License
def set_params(self, r=3, d=8, nbits=16, discrete=True,
               normalization=True, inner_normalization=True,
               penalty='elasticnet', loss='squared_loss'):
    """setter."""
    self.r = r
    self.d = d
    self.nbits = nbits
    self.normalization = normalization
    self.inner_normalization = inner_normalization
    self.discrete = discrete
    self.model = SGDRegressor(
        loss=loss, penalty=penalty,
        average=True, shuffle=True,
        max_iter=5, tol=None)
    self.vectorizer = Vectorizer(
        r=self.r, d=self.d,
        normalization=self.normalization,
        inner_normalization=self.inner_normalization,
        discrete=self.discrete,
        nbits=self.nbits)
    return self
Example #6
Source File: test_sklearn_glm_regressor_converter.py From sklearn-onnx with MIT License
def test_model_sgd_regressor_bool(self):
    model, X = fit_regression_model(
        linear_model.SGDRegressor(), is_bool=True)
    model_onnx = convert_sklearn(
        model, "SGD regression",
        [("input", BooleanTensorType([None, X.shape[1]]))])
    self.assertIsNotNone(model_onnx)
    dump_data_and_model(
        X,
        model,
        model_onnx,
        basename="SklearnSGDRegressorBool",
        allow_failure="StrictVersion(onnxruntime.__version__)"
                      "<= StrictVersion('0.2.1')",
    )
Example #7
Source File: test_sklearn_glm_regressor_converter.py From sklearn-onnx with MIT License
def test_model_sgd_regressor_int(self):
    model, X = fit_regression_model(
        linear_model.SGDRegressor(), is_int=True)
    model_onnx = convert_sklearn(
        model, "SGD regression",
        [("input", Int64TensorType([None, X.shape[1]]))])
    self.assertIsNotNone(model_onnx)
    dump_data_and_model(
        X,
        model,
        model_onnx,
        basename="SklearnSGDRegressorInt-Dec4",
        allow_failure="StrictVersion(onnxruntime.__version__)"
                      "<= StrictVersion('0.2.1')",
    )
Example #8
Source File: test_sklearn_glm_regressor_converter.py From sklearn-onnx with MIT License
def test_model_sgd_regressor(self):
    model, X = fit_regression_model(linear_model.SGDRegressor())
    model_onnx = convert_sklearn(
        model,
        "scikit-learn SGD regression",
        [("input", FloatTensorType([None, X.shape[1]]))],
    )
    self.assertIsNotNone(model_onnx)
    dump_data_and_model(
        X,
        model,
        model_onnx,
        basename="SklearnSGDRegressor-Dec4",
        allow_failure="StrictVersion(onnxruntime.__version__)"
                      "<= StrictVersion('0.2.1')",
    )
Example #9
Source File: ILearner.py From aca with MIT License
def __init__(self):
    self.clf = linear_model.SGDRegressor(n_iter=50)
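
The n_iter argument used here was deprecated in scikit-learn 0.19 and removed in 0.21. A rough modern equivalent, offered as an assumption about the intended behavior rather than as the project's code:

from sklearn import linear_model

# max_iter with tol=None runs a fixed number of epochs, approximating the
# old n_iter=50 behavior on scikit-learn >= 0.21.
clf = linear_model.SGDRegressor(max_iter=50, tol=None)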
Example #10
Source File: ensemble_glm.py From jh-kaggle-util with Apache License 2.0
def fit_ensemble(x, y):
    fit_type = jhkaggle.jhkaggle_config['FIT_TYPE']
    if 1:
        if fit_type == jhkaggle.const.FIT_TYPE_BINARY_CLASSIFICATION:
            blend = SGDClassifier(loss="log", penalty="elasticnet")  # LogisticRegression()
        else:
            # blend = SGDRegressor()
            # blend = LinearRegression()
            # blend = RandomForestRegressor(n_estimators=10, n_jobs=-1, max_depth=5, criterion='mae')
            blend = LassoLarsCV(normalize=True)
            # blend = ElasticNetCV(normalize=True)
            # blend = LinearRegression(normalize=True)
        blend.fit(x, y)
    else:
        blend = LogisticRegression()
        blend.fit(x, y)
    return blend
Example #11
Source File: cobra.py From pycobra with MIT License
def load_default(self, machine_list='basic'):
    """Loads 4 different scikit-learn regressors by default.

    The advanced list adds more machines.

    Parameters
    ----------
    machine_list: optional, list of strings
        List of default machine names to be loaded.

    Returns
    -------
    self : returns an instance of self.
    """
    if machine_list == 'basic':
        machine_list = ['tree', 'ridge', 'random_forest', 'svm']
    if machine_list == 'advanced':
        machine_list = ['lasso', 'tree', 'ridge', 'random_forest',
                        'svm', 'bayesian_ridge', 'sgd']

    self.estimators_ = {}
    for machine in machine_list:
        try:
            if machine == 'lasso':
                self.estimators_['lasso'] = linear_model.LassoCV(random_state=self.random_state).fit(self.X_k_, self.y_k_)
            if machine == 'tree':
                self.estimators_['tree'] = DecisionTreeRegressor(random_state=self.random_state).fit(self.X_k_, self.y_k_)
            if machine == 'ridge':
                self.estimators_['ridge'] = linear_model.RidgeCV().fit(self.X_k_, self.y_k_)
            if machine == 'random_forest':
                self.estimators_['random_forest'] = RandomForestRegressor(random_state=self.random_state).fit(self.X_k_, self.y_k_)
            if machine == 'svm':
                self.estimators_['svm'] = LinearSVR(random_state=self.random_state).fit(self.X_k_, self.y_k_)
            if machine == 'sgd':
                self.estimators_['sgd'] = linear_model.SGDRegressor(random_state=self.random_state).fit(self.X_k_, self.y_k_)
            if machine == 'bayesian_ridge':
                self.estimators_['bayesian_ridge'] = linear_model.BayesianRidge().fit(self.X_k_, self.y_k_)
        except ValueError:
            continue
    return self
Example #12
Source File: test_core_operators.py From lale with Apache License 2.0
def test_sgd_regressor_1(self):
    from lale.lib.sklearn import SGDRegressor
    reg = SGDRegressor(learning_rate='optimal', eta0=0.2)
    reg.fit(self.X_train, self.y_train)
Example #13
Source File: kernelcobra.py From pycobra with MIT License
def load_default(self, machine_list='basic'):
    """Loads 4 different scikit-learn regressors by default.

    The advanced list adds more machines.

    Parameters
    ----------
    machine_list: optional, list of strings
        List of default machine names to be loaded. Default is 'basic'.

    Returns
    -------
    self : returns an instance of self.
    """
    if machine_list == 'basic':
        machine_list = ['tree', 'ridge', 'random_forest', 'svm']
    if machine_list == 'advanced':
        machine_list = ['lasso', 'tree', 'ridge', 'random_forest',
                        'svm', 'bayesian_ridge', 'sgd']

    self.estimators_ = {}
    for machine in machine_list:
        try:
            if machine == 'lasso':
                self.estimators_['lasso'] = linear_model.LassoCV(random_state=self.random_state).fit(self.X_k_, self.y_k_)
            if machine == 'tree':
                self.estimators_['tree'] = DecisionTreeRegressor(random_state=self.random_state).fit(self.X_k_, self.y_k_)
            if machine == 'ridge':
                self.estimators_['ridge'] = linear_model.RidgeCV().fit(self.X_k_, self.y_k_)
            if machine == 'random_forest':
                self.estimators_['random_forest'] = RandomForestRegressor(random_state=self.random_state).fit(self.X_k_, self.y_k_)
            if machine == 'svm':
                self.estimators_['svm'] = SVR().fit(self.X_k_, self.y_k_)
            if machine == 'sgd':
                self.estimators_['sgd'] = linear_model.SGDRegressor(random_state=self.random_state).fit(self.X_k_, self.y_k_)
            if machine == 'bayesian_ridge':
                self.estimators_['bayesian_ridge'] = linear_model.BayesianRidge().fit(self.X_k_, self.y_k_)
        except ValueError:
            continue
    return self
Example #14
Source File: SGDRegressor.py From Splunking-Crime with GNU Affero General Public License v3.0
def __init__(self, options):
    self.handle_options(options)
    out_params = convert_params(
        options.get('params', {}),
        bools=['fit_intercept'],
        ints=['random_state', 'n_iter'],
        floats=['l1_ratio', 'alpha', 'eta0', 'power_t'],
        strs=['penalty', 'learning_rate'],
    )
    self.scaler = StandardScaler()
    self.estimator = _SGDRegressor(**out_params)
    self.columns = None
Example #15
Source File: PLECscore.py From oddt with BSD 3-Clause "New" or "Revised" License
def gen_json(self, home_dir=None, pdbbind_version=2016):
    if not home_dir:
        home_dir = path_join(dirname(__file__), 'PLECscore')

    if isinstance(self.model, SGDRegressor):
        attributes = ['coef_', 'intercept_', 't_']
    elif isinstance(self.model, MLPRegressor):
        attributes = ['loss_', 'coefs_', 'intercepts_', 'n_iter_',
                      'n_layers_', 'n_outputs_', 'out_activation_']

    out = {}
    for attr_name in attributes:
        attr = getattr(self.model, attr_name)
        # convert numpy arrays to list for json
        if isinstance(attr, np.ndarray):
            attr = attr.tolist()
        elif (isinstance(attr, (list, tuple)) and
              isinstance(attr[0], np.ndarray)):
            attr = [x.tolist() for x in attr]
        out[attr_name] = attr

    json_path = path_join(
        home_dir, 'plecscore_%s_p%i_l%i_s%i_pdbbind%i.json' %
        (self.version, self.depth_protein, self.depth_ligand,
         self.size, pdbbind_version))

    with open(json_path, 'w') as json_f:
        json.dump(out, json_f, indent=2)

    return json_path
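
A hedged companion sketch for the reverse direction, restoring a serialized SGDRegressor from such a JSON file. This is not oddt code, and whether predict works unchanged on the restored object can depend on the scikit-learn version:

import json
import numpy as np
from sklearn.linear_model import SGDRegressor

def load_sgd_from_json(json_path):
    # Rebuild a bare estimator and reattach the serialized fit state.
    with open(json_path) as json_f:
        state = json.load(json_f)
    model = SGDRegressor()
    model.coef_ = np.asarray(state['coef_'])
    model.intercept_ = np.asarray(state['intercept_'])
    model.t_ = state['t_']
    return model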
Example #16
Source File: __init__.py From sklearn2pmml with GNU Affero General Public License v3.0
def test_lm(self):
    _checkLM(ElasticNet())
    _checkLM(LinearRegression())
    _checkLM(SGDRegressor())
Example #17
Source File: run_models.py From AirBnbPricePrediction with MIT License
def linear_model_SGD(X_train, y_train, X_val, y_val):
    model = SGDRegressor()
    model.fit(X_train, y_train)
    print_evaluation_metrics(model, "sgd", X_val, y_val.values.ravel())
    print_evaluation_metrics2(model, "sgd", X_train, y_train.values.ravel())
Example #18
Source File: test_linear_model.py From pandas-ml with BSD 3-Clause "New" or "Revised" License
def test_objectmapper(self):
    df = pdml.ModelFrame([])
    self.assertIs(df.linear_model.ARDRegression, lm.ARDRegression)
    self.assertIs(df.linear_model.BayesianRidge, lm.BayesianRidge)
    self.assertIs(df.linear_model.ElasticNet, lm.ElasticNet)
    self.assertIs(df.linear_model.ElasticNetCV, lm.ElasticNetCV)
    self.assertIs(df.linear_model.HuberRegressor, lm.HuberRegressor)
    self.assertIs(df.linear_model.Lars, lm.Lars)
    self.assertIs(df.linear_model.LarsCV, lm.LarsCV)
    self.assertIs(df.linear_model.Lasso, lm.Lasso)
    self.assertIs(df.linear_model.LassoCV, lm.LassoCV)
    self.assertIs(df.linear_model.LassoLars, lm.LassoLars)
    self.assertIs(df.linear_model.LassoLarsCV, lm.LassoLarsCV)
    self.assertIs(df.linear_model.LassoLarsIC, lm.LassoLarsIC)
    self.assertIs(df.linear_model.LinearRegression, lm.LinearRegression)
    self.assertIs(df.linear_model.LogisticRegression, lm.LogisticRegression)
    self.assertIs(df.linear_model.LogisticRegressionCV, lm.LogisticRegressionCV)
    self.assertIs(df.linear_model.MultiTaskLasso, lm.MultiTaskLasso)
    self.assertIs(df.linear_model.MultiTaskElasticNet, lm.MultiTaskElasticNet)
    self.assertIs(df.linear_model.MultiTaskLassoCV, lm.MultiTaskLassoCV)
    self.assertIs(df.linear_model.MultiTaskElasticNetCV, lm.MultiTaskElasticNetCV)
    self.assertIs(df.linear_model.OrthogonalMatchingPursuit, lm.OrthogonalMatchingPursuit)
    self.assertIs(df.linear_model.OrthogonalMatchingPursuitCV, lm.OrthogonalMatchingPursuitCV)
    self.assertIs(df.linear_model.PassiveAggressiveClassifier, lm.PassiveAggressiveClassifier)
    self.assertIs(df.linear_model.PassiveAggressiveRegressor, lm.PassiveAggressiveRegressor)
    self.assertIs(df.linear_model.Perceptron, lm.Perceptron)
    self.assertIs(df.linear_model.RandomizedLasso, lm.RandomizedLasso)
    self.assertIs(df.linear_model.RandomizedLogisticRegression, lm.RandomizedLogisticRegression)
    self.assertIs(df.linear_model.RANSACRegressor, lm.RANSACRegressor)
    self.assertIs(df.linear_model.Ridge, lm.Ridge)
    self.assertIs(df.linear_model.RidgeClassifier, lm.RidgeClassifier)
    self.assertIs(df.linear_model.RidgeClassifierCV, lm.RidgeClassifierCV)
    self.assertIs(df.linear_model.RidgeCV, lm.RidgeCV)
    self.assertIs(df.linear_model.SGDClassifier, lm.SGDClassifier)
    self.assertIs(df.linear_model.SGDRegressor, lm.SGDRegressor)
    self.assertIs(df.linear_model.TheilSenRegressor, lm.TheilSenRegressor)
Example #19
Source File: test_multioutput.py From twitter-stock-recommendation with MIT License
def test_multi_target_sample_weight_partial_fit():
    # weighted regressor
    X = [[1, 2, 3], [4, 5, 6]]
    y = [[3.141, 2.718], [2.718, 3.141]]
    w = [2., 1.]
    rgr_w = MultiOutputRegressor(SGDRegressor(random_state=0, max_iter=5))
    rgr_w.partial_fit(X, y, w)

    # weighted with different weights
    w = [2., 2.]
    rgr = MultiOutputRegressor(SGDRegressor(random_state=0, max_iter=5))
    rgr.partial_fit(X, y, w)

    assert_not_equal(rgr.predict(X)[0][0], rgr_w.predict(X)[0][0])
Example #20
Source File: test_core_operators.py From lale with Apache License 2.0
def test_sgd_regressor_3(self):
    from sklearn.linear_model import SGDRegressor
    reg = SGDRegressor(l1_ratio=0.2, penalty='l1')
    reg.fit(self.X_train, self.y_train)
Example #21
Source File: test_core_operators.py From lale with Apache License 2.0
def test_sgd_regressor_2(self):
    from lale.lib.sklearn import SGDRegressor
    reg = SGDRegressor(early_stopping=False, validation_fraction=0.2)
    reg.fit(self.X_train, self.y_train)
Example #22
Source File: test_core_operators.py From lale with Apache License 2.0
def test_sgd_regressor(self):
    from lale.lib.sklearn import SGDRegressor
    reg = SGDRegressor(loss='squared_loss', epsilon=0.2)
    reg.fit(self.X_train, self.y_train)
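
Note that 'squared_loss' was renamed to 'squared_error' in scikit-learn 1.0, and the old spelling was removed in 1.2. On recent scikit-learn versions the equivalent call on the underlying estimator would be:

from sklearn.linear_model import SGDRegressor

# 'squared_error' replaces the removed 'squared_loss' spelling.
reg = SGDRegressor(loss='squared_error', epsilon=0.2)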
Example #23
Source File: transformers.py From mercari-solution with MIT License
def fit(self, X, y, *args):
    sgd = SGDRegressor(penalty='l1', loss='squared_loss', alpha=3.0e-11,
                       power_t=-0.12, eta0=0.019, random_state=0, average=True)
    sgd.fit(X, np.log1p(y))
    coef_cutoff = np.percentile(np.abs(sgd.coef_), self.percentile_cutoff)
    self.features_to_keep = np.where(np.abs(sgd.coef_) >= coef_cutoff)[0]
    return self
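
The transformer's transform method is not shown in this snippet. A plausible counterpart, labeled hypothetical because it is an assumption rather than mercari-solution code, would simply index the retained columns:

# Hypothetical transform counterpart -- NOT from mercari-solution.
def transform(self, X):
    # Keep only the columns whose |coef_| survived the percentile cutoff.
    return X[:, self.features_to_keep]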
Example #24
Source File: scikitlearn.py From sia-cog with MIT License
def getModels():
    result = []
    result.append("LinearRegression")
    result.append("BayesianRidge")
    result.append("ARDRegression")
    result.append("ElasticNet")
    result.append("HuberRegressor")
    result.append("Lasso")
    result.append("LassoLars")
    result.append("Rigid")  # sic in the source; presumably meant "Ridge"
    result.append("SGDRegressor")
    result.append("SVR")
    result.append("MLPClassifier")
    result.append("KNeighborsClassifier")
    result.append("SVC")
    result.append("GaussianProcessClassifier")
    result.append("DecisionTreeClassifier")
    result.append("RandomForestClassifier")
    result.append("AdaBoostClassifier")
    result.append("GaussianNB")
    result.append("LogisticRegression")
    result.append("QuadraticDiscriminantAnalysis")
    return result
Example #25
Source File: kalman_model.py From thingflow-python with Apache License 2.0
def __init__(self):
    OutputThing.__init__(self, ports=['train', 'observe', 'predict'])
    self.clf = linear_model.SGDRegressor()
Example #26
Source File: test_sgd.py From Mastering-Elasticsearch-7.0 with MIT License
def decision_function(self, X, *args, **kw):
    X = sp.csr_matrix(X)
    return linear_model.SGDRegressor.decision_function(self, X, *args, **kw)
Example #27
Source File: test_sgd.py From Mastering-Elasticsearch-7.0 with MIT License
def partial_fit(self, X, y, *args, **kw):
    X = sp.csr_matrix(X)
    return linear_model.SGDRegressor.partial_fit(self, X, y, *args, **kw)
Example #28
Source File: test_sgd.py From Mastering-Elasticsearch-7.0 with MIT License
def fit(self, X, y, *args, **kw):
    X = sp.csr_matrix(X)
    return linear_model.SGDRegressor.fit(self, X, y, *args, **kw)
Example #29
Source File: test_multioutput.py From Mastering-Elasticsearch-7.0 with MIT License
def test_multi_target_sample_weight_partial_fit():
    # weighted regressor
    X = [[1, 2, 3], [4, 5, 6]]
    y = [[3.141, 2.718], [2.718, 3.141]]
    w = [2., 1.]
    rgr_w = MultiOutputRegressor(SGDRegressor(random_state=0, max_iter=5))
    rgr_w.partial_fit(X, y, w)

    # weighted with different weights
    w = [2., 2.]
    rgr = MultiOutputRegressor(SGDRegressor(random_state=0, max_iter=5))
    rgr.partial_fit(X, y, w)

    assert_not_equal(rgr.predict(X)[0][0], rgr_w.predict(X)[0][0])
Example #30
Source File: regressor_chains.py From scikit-multiflow with BSD 3-Clause "New" or "Revised" License
def __init__(self, base_estimator=SGDRegressor(), order=None, random_state=None):
    super().__init__()
    self.base_estimator = base_estimator
    self.order = order
    self.random_state = random_state
    self.chain = None
    self.ensemble = None
    self.L = None
    self._random_state = None  # This is the actual random_state object used internally
    self.__configure()
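
scikit-multiflow's regressor chain mirrors the batch RegressorChain in scikit-learn, where each regressor in the chain sees the original features plus the previously predicted targets. A hedged usage sketch of the analogous scikit-learn API (not scikit-multiflow code):

from sklearn.datasets import make_regression
from sklearn.linear_model import SGDRegressor
from sklearn.multioutput import RegressorChain

X, y = make_regression(n_samples=100, n_features=8, n_targets=3, random_state=0)
# Chain the targets in index order; each step conditions on the previous ones.
chain = RegressorChain(SGDRegressor(max_iter=1000, tol=1e-3), order=[0, 1, 2])
chain.fit(X, y)
print(chain.predict(X[:2]).shape)  # (2, 3)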