org.jpmml.converter.regression.RegressionModelUtil Java Examples
The following examples show how to use
org.jpmml.converter.regression.RegressionModelUtil.
The examples are taken from open-source projects; the source file and project are noted above each example.
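Before the individual examples, here is a minimal sketch of the three factory methods that recur throughout them: createRegression, createBinaryLogisticClassification and createRegressionTable. The wrapper class RegressionModelUtilSketch, its method names and its parameter arrangement are hypothetical; the call signatures are taken from the examples below, and the feature list, coefficients, intercept and Schema are assumed to be prepared by the calling converter.

import java.util.List;

import org.dmg.pmml.Model;
import org.dmg.pmml.regression.RegressionModel;
import org.dmg.pmml.regression.RegressionTable;
import org.jpmml.converter.Feature;
import org.jpmml.converter.Schema;
import org.jpmml.converter.regression.RegressionModelUtil;

// Hypothetical helper class; it only demonstrates the factory calls that recur in the examples below
public class RegressionModelUtilSketch {

    // A plain linear regression without output normalization
    static public RegressionModel linearRegression(List<? extends Feature> features, List<Double> coefficients, Number intercept, Schema schema){
        return RegressionModelUtil.createRegression(features, coefficients, intercept, RegressionModel.NormalizationMethod.NONE, schema);
    }

    // A two-class logistic regression with logit normalization and a probability distribution output
    static public Model binaryLogisticClassification(List<? extends Feature> features, List<Double> coefficients, Number intercept, Schema schema){
        return RegressionModelUtil.createBinaryLogisticClassification(features, coefficients, intercept, RegressionModel.NormalizationMethod.LOGIT, true, schema);
    }

    // A single per-class regression table, e.g. for assembling a multinomial RegressionModel by hand
    static public RegressionTable regressionTable(List<? extends Feature> features, List<Double> coefficients, Number intercept, Object targetCategory){
        return RegressionModelUtil.createRegressionTable(features, coefficients, intercept)
            .setTargetCategory(targetCategory);
    }
}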
Example #1
Source File: LinearRegressor.java From jpmml-sklearn with GNU Affero General Public License v3.0
@Override
public RegressionModel encodeModel(Schema schema){
    List<? extends Number> coef = getCoef();
    List<? extends Number> intercept = getIntercept();

    return RegressionModelUtil.createRegression(schema.getFeatures(), coef, Iterables.getOnlyElement(intercept), null, schema);
}
Example #2
Source File: DummyRegressor.java From jpmml-sklearn with GNU Affero General Public License v3.0
@Override
public RegressionModel encodeModel(Schema schema){
    List<? extends Number> constant = getConstant();

    Number intercept = Iterables.getOnlyElement(constant);

    return RegressionModelUtil.createRegression(Collections.emptyList(), Collections.emptyList(), intercept.doubleValue(), null, schema);
}
Example #3
Source File: LogNetConverter.java From jpmml-r with GNU Affero General Public License v3.0
@Override
public Model encodeModel(RDoubleVector a0, RExp beta, int column, Schema schema){
    Double intercept = a0.getValue(column);

    List<Double> coefficients = getCoefficients((S4Object)beta, column);

    return RegressionModelUtil.createBinaryLogisticClassification(schema.getFeatures(), coefficients, intercept, RegressionModel.NormalizationMethod.LOGIT, true, schema);
}
Example #4
Source File: LinearModelUtil.java From jpmml-sparkml with GNU Affero General Public License v3.0
static public <C extends ModelConverter<?> & HasRegressionTableOptions> Model createRegression(C converter, Vector coefficients, double intercept, Schema schema){
    ContinuousLabel continuousLabel = (ContinuousLabel)schema.getLabel();

    String representation = (String)converter.getOption(HasRegressionTableOptions.OPTION_REPRESENTATION, null);

    List<Feature> features = new ArrayList<>(schema.getFeatures());
    List<Double> featureCoefficients = new ArrayList<>(VectorUtil.toList(coefficients));

    RegressionTableUtil.simplify(converter, null, features, featureCoefficients);

    // The "representation" option selects a GeneralRegressionModel encoding instead of the default RegressionModel encoding
    if(representation != null && (GeneralRegressionModel.class.getSimpleName()).equalsIgnoreCase(representation)){
        GeneralRegressionModel generalRegressionModel = new GeneralRegressionModel(GeneralRegressionModel.ModelType.REGRESSION, MiningFunction.REGRESSION, ModelUtil.createMiningSchema(continuousLabel), null, null, null);

        GeneralRegressionModelUtil.encodeRegressionTable(generalRegressionModel, features, featureCoefficients, intercept, null);

        return generalRegressionModel;
    }

    return RegressionModelUtil.createRegression(features, featureCoefficients, intercept, RegressionModel.NormalizationMethod.NONE, schema);
}
Example #5
Source File: LinearModelUtil.java From jpmml-sparkml with GNU Affero General Public License v3.0
static public <C extends ModelConverter<?> & HasRegressionTableOptions> Model createBinaryLogisticClassification(C converter, Vector coefficients, double intercept, Schema schema){
    CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

    String representation = (String)converter.getOption(HasRegressionTableOptions.OPTION_REPRESENTATION, null);

    List<Feature> features = new ArrayList<>(schema.getFeatures());
    List<Double> featureCoefficients = new ArrayList<>(VectorUtil.toList(coefficients));

    RegressionTableUtil.simplify(converter, null, features, featureCoefficients);

    if(representation != null && (GeneralRegressionModel.class.getSimpleName()).equalsIgnoreCase(representation)){
        Object targetCategory = categoricalLabel.getValue(1);

        GeneralRegressionModel generalRegressionModel = new GeneralRegressionModel(GeneralRegressionModel.ModelType.GENERALIZED_LINEAR, MiningFunction.CLASSIFICATION, ModelUtil.createMiningSchema(categoricalLabel), null, null, null)
            .setLinkFunction(GeneralRegressionModel.LinkFunction.LOGIT);

        GeneralRegressionModelUtil.encodeRegressionTable(generalRegressionModel, features, featureCoefficients, intercept, targetCategory);

        return generalRegressionModel;
    }

    return RegressionModelUtil.createBinaryLogisticClassification(features, featureCoefficients, intercept, RegressionModel.NormalizationMethod.LOGIT, true, schema);
}
Example #6
Source File: LinearDiscriminantAnalysis.java From jpmml-sklearn with GNU Affero General Public License v3.0
private Model encodeMultinomialModel(Schema schema){
    String sklearnVersion = getSkLearnVersion();

    int[] shape = getCoefShape();

    int numberOfClasses = shape[0];
    int numberOfFeatures = shape[1];

    List<? extends Number> coef = getCoef();
    List<? extends Number> intercept = getIntercept();

    CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

    List<? extends Feature> features = schema.getFeatures();

    // See https://github.com/scikit-learn/scikit-learn/issues/6848
    boolean corrected = (sklearnVersion != null && SkLearnUtil.compareVersion(sklearnVersion, "0.21") >= 0);

    if(!corrected){
        return super.encodeModel(schema);
    } // End if

    if(numberOfClasses >= 3){
        SchemaUtil.checkSize(numberOfClasses, categoricalLabel);

        Schema segmentSchema = (schema.toAnonymousRegressorSchema(DataType.DOUBLE)).toEmptySchema();

        // One regression model per class, combined with softmax normalization
        List<RegressionModel> regressionModels = new ArrayList<>();

        for(int i = 0, rows = categoricalLabel.size(); i < rows; i++){
            RegressionModel regressionModel = RegressionModelUtil.createRegression(features, CMatrixUtil.getRow(coef, numberOfClasses, numberOfFeatures, i), intercept.get(i), RegressionModel.NormalizationMethod.NONE, segmentSchema)
                .setOutput(ModelUtil.createPredictedOutput(FieldName.create("decisionFunction(" + categoricalLabel.getValue(i) + ")"), OpType.CONTINUOUS, DataType.DOUBLE));

            regressionModels.add(regressionModel);
        }

        return MiningModelUtil.createClassification(regressionModels, RegressionModel.NormalizationMethod.SOFTMAX, true, schema);
    } else {
        throw new IllegalArgumentException();
    }
}
Example #7
Source File: LMConverter.java From jpmml-r with GNU Affero General Public License v3.0
@Override
public Model encodeModel(Schema schema){
    RGenericVector lm = getObject();

    RDoubleVector coefficients = lm.getDoubleElement("coefficients");

    Double intercept = coefficients.getElement(getInterceptName(), false);

    List<? extends Feature> features = schema.getFeatures();

    SchemaUtil.checkSize(coefficients.size() - (intercept != null ? 1 : 0), features);

    List<Double> featureCoefficients = getFeatureCoefficients(features, coefficients);

    return RegressionModelUtil.createRegression(features, featureCoefficients, intercept, null, schema);
}
Example #8
Source File: MiningModelUtil.java From pyramid with Apache License 2.0
static public MiningModel createClassification(List<? extends Model> models, RegressionModel.NormalizationMethod normalizationMethod, boolean hasProbabilityDistribution, Schema schema){
    CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

    // modified here
    if(categoricalLabel.size() != models.size()){
        throw new IllegalArgumentException();
    } // End if

    if(normalizationMethod != null){

        switch(normalizationMethod){
            case NONE:
            case SIMPLEMAX:
            case SOFTMAX:
                break;
            default:
                throw new IllegalArgumentException();
        }
    }

    MathContext mathContext = null;

    List<RegressionTable> regressionTables = new ArrayList<>();

    for(int i = 0; i < categoricalLabel.size(); i++){
        Model model = models.get(i);

        MathContext modelMathContext = model.getMathContext();
        if(modelMathContext == null){
            modelMathContext = MathContext.DOUBLE;
        } // End if

        if(mathContext == null){
            mathContext = modelMathContext;
        } else {

            if(!Objects.equals(mathContext, modelMathContext)){
                throw new IllegalArgumentException();
            }
        }

        Feature feature = MODEL_PREDICTION.apply(model);

        RegressionTable regressionTable = RegressionModelUtil.createRegressionTable(Collections.singletonList(feature), Collections.singletonList(1d), null)
            .setTargetCategory(categoricalLabel.getValue(i));

        regressionTables.add(regressionTable);
    }

    RegressionModel regressionModel = new RegressionModel(MiningFunction.CLASSIFICATION, ModelUtil.createMiningSchema(categoricalLabel), regressionTables)
        .setNormalizationMethod(normalizationMethod)
        .setMathContext(ModelUtil.simplifyMathContext(mathContext))
        .setOutput(hasProbabilityDistribution ? ModelUtil.createProbabilityOutput(mathContext, categoricalLabel) : null);

    List<Model> segmentationModels = new ArrayList<>(models);
    segmentationModels.add(regressionModel);

    return createModelChain(segmentationModels, schema);
}
Example #9
Source File: LinearModelUtil.java From jpmml-sparkml with GNU Affero General Public License v3.0
static public <C extends ModelConverter<?> & HasRegressionTableOptions> Model createSoftmaxClassification(C converter, Matrix coefficients, Vector intercepts, Schema schema){
    CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

    MatrixUtil.checkRows(categoricalLabel.size(), coefficients);

    List<RegressionTable> regressionTables = new ArrayList<>();

    for(int i = 0; i < categoricalLabel.size(); i++){
        Object targetCategory = categoricalLabel.getValue(i);

        List<Feature> features = new ArrayList<>(schema.getFeatures());
        List<Double> featureCoefficients = new ArrayList<>(MatrixUtil.getRow(coefficients, i));

        RegressionTableUtil.simplify(converter, targetCategory, features, featureCoefficients);

        double intercept = intercepts.apply(i);

        RegressionTable regressionTable = RegressionModelUtil.createRegressionTable(features, featureCoefficients, intercept)
            .setTargetCategory(targetCategory);

        regressionTables.add(regressionTable);
    }

    RegressionModel regressionModel = new RegressionModel(MiningFunction.CLASSIFICATION, ModelUtil.createMiningSchema(categoricalLabel), regressionTables)
        .setNormalizationMethod(RegressionModel.NormalizationMethod.SOFTMAX);

    return regressionModel;
}
Example #10
Source File: LinearClassifier.java From jpmml-sklearn with GNU Affero General Public License v3.0
@Override
public Model encodeModel(Schema schema){
    int[] shape = getCoefShape();

    int numberOfClasses = shape[0];
    int numberOfFeatures = shape[1];

    boolean hasProbabilityDistribution = hasProbabilityDistribution();

    List<? extends Number> coef = getCoef();
    List<? extends Number> intercept = getIntercept();

    CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

    List<? extends Feature> features = schema.getFeatures();

    if(numberOfClasses == 1){
        // Two target categories: a single binary logistic regression
        SchemaUtil.checkSize(2, categoricalLabel);

        return RegressionModelUtil.createBinaryLogisticClassification(features, CMatrixUtil.getRow(coef, numberOfClasses, numberOfFeatures, 0), intercept.get(0), RegressionModel.NormalizationMethod.LOGIT, hasProbabilityDistribution, schema);
    } else if(numberOfClasses >= 3){
        // Three or more target categories: one regression model per class, combined with simplemax normalization
        SchemaUtil.checkSize(numberOfClasses, categoricalLabel);

        Schema segmentSchema = (schema.toAnonymousRegressorSchema(DataType.DOUBLE)).toEmptySchema();

        List<RegressionModel> regressionModels = new ArrayList<>();

        for(int i = 0, rows = categoricalLabel.size(); i < rows; i++){
            RegressionModel regressionModel = RegressionModelUtil.createRegression(features, CMatrixUtil.getRow(coef, numberOfClasses, numberOfFeatures, i), intercept.get(i), RegressionModel.NormalizationMethod.LOGIT, segmentSchema)
                .setOutput(ModelUtil.createPredictedOutput(FieldName.create("decisionFunction(" + categoricalLabel.getValue(i) + ")"), OpType.CONTINUOUS, DataType.DOUBLE));

            regressionModels.add(regressionModel);
        }

        return MiningModelUtil.createClassification(regressionModels, RegressionModel.NormalizationMethod.SIMPLEMAX, hasProbabilityDistribution, schema);
    } else {
        throw new IllegalArgumentException();
    }
}