Python pyspark.ml.Model() Examples
The following are 9 code examples of pyspark.ml.Model(). They are drawn from open-source projects; the original project and source file are noted above each example. You may also want to check out all available functions and classes of the pyspark.ml module.
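For context, pyspark.ml.Model is the abstract base class for fitted models: calling fit() on any Estimator returns a Model subclass that can transform DataFrames. A minimal, self-contained sketch (the tiny DataFrame here is illustrative only, not taken from any of the projects below):

from pyspark.sql import SparkSession
from pyspark.ml import Model
from pyspark.ml.classification import LogisticRegression
from pyspark.ml.linalg import Vectors

spark = SparkSession.builder.master("local[1]").getOrCreate()
df = spark.createDataFrame(
    [(Vectors.dense([0.0]), 0.0), (Vectors.dense([1.0]), 1.0)],
    ["features", "label"])

# fit() on an Estimator returns a fitted Model (a LogisticRegressionModel here)
lr_model = LogisticRegression(maxIter=5).fit(df)
assert isinstance(lr_model, Model)
lr_model.transform(df).select("features", "prediction").show()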
Example #1
Source File: tests.py From LearningApacheSpark with MIT License
def test_java_params(self):
    import pyspark.ml.feature
    import pyspark.ml.classification
    import pyspark.ml.clustering
    import pyspark.ml.evaluation
    import pyspark.ml.pipeline
    import pyspark.ml.recommendation
    import pyspark.ml.regression

    modules = [pyspark.ml.feature, pyspark.ml.classification, pyspark.ml.clustering,
               pyspark.ml.evaluation, pyspark.ml.pipeline, pyspark.ml.recommendation,
               pyspark.ml.regression]
    for module in modules:
        for name, cls in inspect.getmembers(module, inspect.isclass):
            if not name.endswith('Model') and not name.endswith('Params') \
                    and issubclass(cls, JavaParams) and not inspect.isabstract(cls):
                # NOTE: disable check_params_exist until there is parity with Scala API
                ParamTests.check_params(self, cls(), check_params_exist=False)

    # Additional classes that need explicit construction
    from pyspark.ml.feature import CountVectorizerModel, StringIndexerModel
    ParamTests.check_params(self, CountVectorizerModel.from_vocabulary(['a'], 'input'),
                            check_params_exist=False)
    ParamTests.check_params(self, StringIndexerModel.from_labels(['a', 'b'], 'input'),
                            check_params_exist=False)
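The sweep above uses inspect to enumerate every concrete JavaParams subclass in a module. A standalone sketch of just the discovery step, printing param names instead of asserting (pyspark.ml.feature stands in for the full module list; the SparkSession is needed because, to my understanding, constructing a JavaParams wrapper touches the JVM):

import inspect

from pyspark.sql import SparkSession
from pyspark.ml.wrapper import JavaParams
import pyspark.ml.feature

spark = SparkSession.builder.master("local[1]").getOrCreate()

for name, cls in inspect.getmembers(pyspark.ml.feature, inspect.isclass):
    if (not name.endswith('Model') and not name.endswith('Params')
            and issubclass(cls, JavaParams) and not inspect.isabstract(cls)):
        # each surviving class is default-constructible, so its Params can be listed
        print(name, [p.name for p in cls().params])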
Example #2
Source File: classification.py From LearningApacheSpark with MIT License
def _make_java_param_pair(self, param, value):
    """
    Makes a Java param pair.
    """
    sc = SparkContext._active_spark_context
    param = self._resolveParam(param)
    _java_obj = JavaParams._new_java_obj("org.apache.spark.ml.classification.OneVsRest",
                                         self.uid)
    java_param = _java_obj.getParam(param.name)
    if isinstance(value, JavaParams):
        # used in the case of an estimator having another estimator as a parameter
        # the reason why this is not in _py2java in common.py is that importing
        # Estimator and Model in common.py results in a circular import with inherit_doc
        java_value = value._to_java()
    else:
        java_value = _py2java(sc, value)
    return java_param.w(java_value)
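The JavaParams branch above exists because OneVsRest's classifier param holds another Estimator rather than a plain Python value. A hedged sketch of the call that exercises it (an active SparkSession is assumed, and df stands for an assumed training DataFrame):

from pyspark.ml.classification import LogisticRegression, OneVsRest

# `classifier` is itself a JavaParams instance, so _make_java_param_pair
# converts it with value._to_java() instead of the generic _py2java()
ovr = OneVsRest(classifier=LogisticRegression(maxIter=10))
# ovr_model = ovr.fit(df)  # df: assumed DataFrame with "features"/"label" columns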
Example #3
Source File: tests.py From LearningApacheSpark with MIT License
def test_idf(self):
    dataset = self.spark.createDataFrame([
        (DenseVector([1.0, 2.0]),),
        (DenseVector([0.0, 1.0]),),
        (DenseVector([3.0, 0.2]),)], ["tf"])
    idf0 = IDF(inputCol="tf")
    self.assertListEqual(idf0.params, [idf0.inputCol, idf0.minDocFreq, idf0.outputCol])
    idf0m = idf0.fit(dataset, {idf0.outputCol: "idf"})
    self.assertEqual(idf0m.uid, idf0.uid,
                     "Model should inherit the UID from its parent estimator.")
    output = idf0m.transform(dataset)
    self.assertIsNotNone(output.head().idf)
    # Test that parameters transferred to Python Model
    ParamTests.check_params(self, idf0m)
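Worth noting in the test above: the second argument to fit() is a param map that overrides outputCol for that one call only. The same pattern outside a test harness might look like the following sketch (spark is an assumed active SparkSession):

from pyspark.ml.feature import IDF
from pyspark.ml.linalg import DenseVector

df = spark.createDataFrame(
    [(DenseVector([1.0, 2.0]),), (DenseVector([0.0, 1.0]),)], ["tf"])
idf = IDF(inputCol="tf")
# the {idf.outputCol: "idf"} map applies to this fit() only; the estimator's
# own outputCol setting is not mutated, but the fitted model carries it
model = idf.fit(df, {idf.outputCol: "idf"})
model.transform(df).show()   # output includes an "idf" column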
Example #4
Source File: classification.py From LearningApacheSpark with MIT License
def coefficients(self):
    """
    Model coefficients of Linear SVM Classifier.
    """
    return self._call_java("coefficients")
Example #5
Source File: classification.py From LearningApacheSpark with MIT License
def intercept(self):
    """
    Model intercept of Linear SVM Classifier.
    """
    return self._call_java("intercept")
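Examples #4 and #5 are the two read-only accessors on LinearSVCModel; each simply forwards to the JVM model through _call_java. A hedged end-to-end sketch (an active SparkSession is assumed, and df stands for an assumed binary-labeled DataFrame):

from pyspark.ml.classification import LinearSVC

svc = LinearSVC(maxIter=10, regParam=0.1)
# model = svc.fit(df)         # df: assumed DataFrame with "features"/"label"
# model.coefficients          # DenseVector of per-feature weights
# model.intercept             # a single float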
Example #6
Source File: classification.py From LearningApacheSpark with MIT License
def coefficients(self):
    """
    Model coefficients of binomial logistic regression.
    An exception is thrown in the case of multinomial logistic regression.
    """
    return self._call_java("coefficients")
Example #7
Source File: classification.py From LearningApacheSpark with MIT License
def coefficientMatrix(self):
    """
    Model coefficients.
    """
    return self._call_java("coefficientMatrix")
Example #8
Source File: classification.py From LearningApacheSpark with MIT License
def interceptVector(self):
    """
    Model intercept.
    """
    return self._call_java("interceptVector")
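Examples #6 through #8 all live on LogisticRegressionModel: coefficients (and intercept) are defined only for binomial fits, while coefficientMatrix and interceptVector work for both the binomial and multinomial families. A hedged sketch (an active SparkSession is assumed, and df stands for an assumed multi-class training DataFrame):

from pyspark.ml.classification import LogisticRegression

lr = LogisticRegression(family="multinomial")
# model = lr.fit(df)           # df: assumed DataFrame with 3+ label classes
# model.coefficientMatrix      # numClasses x numFeatures matrix
# model.interceptVector        # one intercept per class
# model.coefficients           # raises: only defined for binomial models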
Example #9
Source File: spark.py From mlflow with Apache License 2.0
def _validate_model(spark_model):
    from pyspark.ml.util import MLReadable, MLWritable
    from pyspark.ml import Model as PySparkModel

    if not isinstance(spark_model, PySparkModel) \
            or not isinstance(spark_model, MLReadable) \
            or not isinstance(spark_model, MLWritable):
        raise MlflowException(
            "Cannot serialize this model. MLflow can only save descendants of "
            "pyspark.Model that implement MLWritable and MLReadable.",
            INVALID_PARAMETER_VALUE)
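The practical upshot of this check: mlflow.spark accepts a fitted PipelineModel but rejects an unfitted Pipeline, since only the former descends from pyspark.ml.Model. A hedged sketch of the distinction (assuming an active SparkSession; df stands for an assumed training DataFrame):

from pyspark.ml import Model, Pipeline
from pyspark.ml.classification import LogisticRegression

pipeline = Pipeline(stages=[LogisticRegression()])
print(isinstance(pipeline, Model))    # False: an Estimator, so _validate_model raises
# fitted = pipeline.fit(df)           # df: assumed training DataFrame
# print(isinstance(fitted, Model))    # True: PipelineModel passes all three checks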