Java Code Examples for org.jpmml.converter.mining.MiningModelUtil#createClassification()
The following examples show how to use org.jpmml.converter.mining.MiningModelUtil#createClassification().
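All of the examples share the same pattern: build one regression-style segment model per target category, attach a continuous output field to each, and pass the list to MiningModelUtil.createClassification() together with a RegressionModel.NormalizationMethod (typically SOFTMAX), a flag that controls whether a probability distribution output is attached, and the original classification Schema; the method returns the combined MiningModel. The sketch below is a minimal illustration of that pattern distilled from the examples, not code taken from any one converter; the method name encodeSoftmaxClassification and the assumption that coef holds one row of coefficients per category are illustrative only.

// Minimal sketch of the common pattern; the method name and the "decisionFunction"
// output field prefix are illustrative, not part of the library API.
public MiningModel encodeSoftmaxClassification(List<? extends Number> coef, List<? extends Number> intercept, Schema schema){
	CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

	List<? extends Feature> features = schema.getFeatures();

	// Per-class segment models operate on an anonymous regressor schema without a label
	Schema segmentSchema = (schema.toAnonymousRegressorSchema(DataType.DOUBLE)).toEmptySchema();

	List<RegressionModel> regressionModels = new ArrayList<>();

	for(int i = 0, rows = categoricalLabel.size(); i < rows; i++){
		// One linear regression per target category, emitting a raw per-class score
		RegressionModel regressionModel = RegressionModelUtil.createRegression(features, CMatrixUtil.getRow(coef, rows, features.size(), i), intercept.get(i), RegressionModel.NormalizationMethod.NONE, segmentSchema)
			.setOutput(ModelUtil.createPredictedOutput(FieldName.create("decisionFunction(" + categoricalLabel.getValue(i) + ")"), OpType.CONTINUOUS, DataType.DOUBLE));

		regressionModels.add(regressionModel);
	}

	// Combine the per-class models; SOFTMAX maps their raw scores to probabilities,
	// and the boolean flag requests a probability distribution output on the result
	return MiningModelUtil.createClassification(regressionModels, RegressionModel.NormalizationMethod.SOFTMAX, true, schema);
}

Examples 1, 2 and 4 apply this pattern to per-class tree ensembles (LightGBM, XGBoost and GBM), while Examples 3 and 5 use per-class linear models; Example 5 additionally shows the SIMPLEMAX normalization method combined with LOGIT-normalized segment models.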
Example 1
Source File: MultinomialLogisticRegression.java From jpmml-lightgbm with GNU Affero General Public License v3.0
@Override
public MiningModel encodeMiningModel(List<Tree> trees, Integer numIteration, Schema schema){
	Schema segmentSchema = schema.toAnonymousRegressorSchema(DataType.DOUBLE);

	List<MiningModel> miningModels = new ArrayList<>();

	CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

	// One boosted tree ensemble per target category, each emitting a raw per-class score
	for(int i = 0, rows = categoricalLabel.size(), columns = (trees.size() / rows); i < rows; i++){
		MiningModel miningModel = createMiningModel(FortranMatrixUtil.getRow(trees, rows, columns, i), numIteration, segmentSchema)
			.setOutput(ModelUtil.createPredictedOutput(FieldName.create("lgbmValue(" + categoricalLabel.getValue(i) + ")"), OpType.CONTINUOUS, DataType.DOUBLE));

		miningModels.add(miningModel);
	}

	// Combine the per-class ensembles; softmax normalization turns the raw scores into class probabilities
	return MiningModelUtil.createClassification(miningModels, RegressionModel.NormalizationMethod.SOFTMAX, true, schema);
}
Example 2
Source File: MultinomialLogisticRegression.java From jpmml-xgboost with GNU Affero General Public License v3.0
@Override
public MiningModel encodeMiningModel(List<RegTree> trees, List<Float> weights, float base_score, Integer ntreeLimit, Schema schema){
	Schema segmentSchema = schema.toAnonymousRegressorSchema(DataType.FLOAT);

	List<MiningModel> miningModels = new ArrayList<>();

	CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

	for(int i = 0, columns = categoricalLabel.size(), rows = (trees.size() / columns); i < columns; i++){
		MiningModel miningModel = createMiningModel(CMatrixUtil.getColumn(trees, rows, columns, i), (weights != null) ? CMatrixUtil.getColumn(weights, rows, columns, i) : null, base_score, ntreeLimit, segmentSchema)
			.setOutput(ModelUtil.createPredictedOutput(FieldName.create("xgbValue(" + categoricalLabel.getValue(i) + ")"), OpType.CONTINUOUS, DataType.FLOAT));

		miningModels.add(miningModel);
	}

	return MiningModelUtil.createClassification(miningModels, RegressionModel.NormalizationMethod.SOFTMAX, true, schema);
}
Example 3
Source File: LinearDiscriminantAnalysis.java From jpmml-sklearn with GNU Affero General Public License v3.0
private Model encodeMultinomialModel(Schema schema){
	String sklearnVersion = getSkLearnVersion();

	int[] shape = getCoefShape();

	int numberOfClasses = shape[0];
	int numberOfFeatures = shape[1];

	List<? extends Number> coef = getCoef();
	List<? extends Number> intercept = getIntercept();

	CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

	List<? extends Feature> features = schema.getFeatures();

	// See https://github.com/scikit-learn/scikit-learn/issues/6848
	boolean corrected = (sklearnVersion != null && SkLearnUtil.compareVersion(sklearnVersion, "0.21") >= 0);

	if(!corrected){
		return super.encodeModel(schema);
	} // End if

	if(numberOfClasses >= 3){
		SchemaUtil.checkSize(numberOfClasses, categoricalLabel);

		Schema segmentSchema = (schema.toAnonymousRegressorSchema(DataType.DOUBLE)).toEmptySchema();

		List<RegressionModel> regressionModels = new ArrayList<>();

		for(int i = 0, rows = categoricalLabel.size(); i < rows; i++){
			RegressionModel regressionModel = RegressionModelUtil.createRegression(features, CMatrixUtil.getRow(coef, numberOfClasses, numberOfFeatures, i), intercept.get(i), RegressionModel.NormalizationMethod.NONE, segmentSchema)
				.setOutput(ModelUtil.createPredictedOutput(FieldName.create("decisionFunction(" + categoricalLabel.getValue(i) + ")"), OpType.CONTINUOUS, DataType.DOUBLE));

			regressionModels.add(regressionModel);
		}

		return MiningModelUtil.createClassification(regressionModels, RegressionModel.NormalizationMethod.SOFTMAX, true, schema);
	} else {
		throw new IllegalArgumentException();
	}
}
Example 4
Source File: GBMConverter.java From jpmml-r with GNU Affero General Public License v3.0
private MiningModel encodeMultinomialClassification(List<TreeModel> treeModels, Double initF, Schema schema){
	CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

	Schema segmentSchema = schema.toAnonymousRegressorSchema(DataType.DOUBLE);

	List<Model> miningModels = new ArrayList<>();

	for(int i = 0, columns = categoricalLabel.size(), rows = (treeModels.size() / columns); i < columns; i++){
		MiningModel miningModel = createMiningModel(CMatrixUtil.getColumn(treeModels, rows, columns, i), initF, segmentSchema)
			.setOutput(ModelUtil.createPredictedOutput(FieldName.create("gbmValue(" + categoricalLabel.getValue(i) + ")"), OpType.CONTINUOUS, DataType.DOUBLE));

		miningModels.add(miningModel);
	}

	return MiningModelUtil.createClassification(miningModels, RegressionModel.NormalizationMethod.SOFTMAX, true, schema);
}
Example 5
Source File: LinearClassifier.java From jpmml-sklearn with GNU Affero General Public License v3.0
@Override
public Model encodeModel(Schema schema){
	int[] shape = getCoefShape();

	int numberOfClasses = shape[0];
	int numberOfFeatures = shape[1];

	boolean hasProbabilityDistribution = hasProbabilityDistribution();

	List<? extends Number> coef = getCoef();
	List<? extends Number> intercept = getIntercept();

	CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

	List<? extends Feature> features = schema.getFeatures();

	if(numberOfClasses == 1){
		SchemaUtil.checkSize(2, categoricalLabel);

		// Binary classification: a single logistic regression model suffices
		return RegressionModelUtil.createBinaryLogisticClassification(features, CMatrixUtil.getRow(coef, numberOfClasses, numberOfFeatures, 0), intercept.get(0), RegressionModel.NormalizationMethod.LOGIT, hasProbabilityDistribution, schema);
	} else if(numberOfClasses >= 3){
		SchemaUtil.checkSize(numberOfClasses, categoricalLabel);

		Schema segmentSchema = (schema.toAnonymousRegressorSchema(DataType.DOUBLE)).toEmptySchema();

		List<RegressionModel> regressionModels = new ArrayList<>();

		// Multinomial classification: one LOGIT-normalized regression per target category
		for(int i = 0, rows = categoricalLabel.size(); i < rows; i++){
			RegressionModel regressionModel = RegressionModelUtil.createRegression(features, CMatrixUtil.getRow(coef, numberOfClasses, numberOfFeatures, i), intercept.get(i), RegressionModel.NormalizationMethod.LOGIT, segmentSchema)
				.setOutput(ModelUtil.createPredictedOutput(FieldName.create("decisionFunction(" + categoricalLabel.getValue(i) + ")"), OpType.CONTINUOUS, DataType.DOUBLE));

			regressionModels.add(regressionModel);
		}

		// SIMPLEMAX normalizes the per-class probabilities so that they sum to one
		return MiningModelUtil.createClassification(regressionModels, RegressionModel.NormalizationMethod.SIMPLEMAX, hasProbabilityDistribution, schema);
	} else {
		throw new IllegalArgumentException();
	}
}