org.jpmml.converter.ModelUtil Java Examples
The following examples show how to use org.jpmml.converter.ModelUtil.
The examples are taken from open source projects; the source file, originating project, and license are noted above each example.
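Across these examples, the most frequently used ModelUtil helpers are createMiningSchema(Label), createProbabilityOutput(DataType, CategoricalLabel), and createPredictedOutput(FieldName, OpType, DataType, Transformation...). The snippet below is a minimal sketch of that recurring pattern, assuming a pre-built org.dmg.pmml.tree.Node and a Schema whose label is categorical; the ModelUtilSketch class and encodeClassifierTree method are illustrative names only and do not belong to any of the projects listed here.

import org.dmg.pmml.DataType;
import org.dmg.pmml.MiningFunction;
import org.dmg.pmml.MiningSchema;
import org.dmg.pmml.tree.Node;
import org.dmg.pmml.tree.TreeModel;
import org.jpmml.converter.CategoricalLabel;
import org.jpmml.converter.ModelUtil;
import org.jpmml.converter.Schema;

public class ModelUtilSketch {

    // Sketch only: class and method names are illustrative, not part of any JPMML project
    static public TreeModel encodeClassifierTree(Node root, Schema schema){
        // The label carries the target field and its category values
        CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

        // Derive the MiningSchema from the label, as in the examples below
        MiningSchema miningSchema = ModelUtil.createMiningSchema(categoricalLabel);

        // Attach per-class probability OutputFields to the model
        TreeModel treeModel = new TreeModel(MiningFunction.CLASSIFICATION, miningSchema, root)
            .setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel));

        return treeModel;
    }
}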
Example #1
Source File: VotingRegressor.java From jpmml-sklearn with GNU Affero General Public License v3.0
@Override
public Model encodeModel(Schema schema){
    List<? extends Regressor> estimators = getEstimators();
    List<? extends Number> weights = getWeights();

    List<Model> models = new ArrayList<>();

    for(Regressor estimator : estimators){
        Model model = estimator.encodeModel(schema);

        models.add(model);
    }

    Segmentation.MultipleModelMethod multipleModelMethod = (weights != null && weights.size() > 0 ? Segmentation.MultipleModelMethod.WEIGHTED_AVERAGE : Segmentation.MultipleModelMethod.AVERAGE);

    MiningModel miningModel = new MiningModel(MiningFunction.REGRESSION, ModelUtil.createMiningSchema(schema.getLabel()))
        .setSegmentation(MiningModelUtil.createSegmentation(multipleModelMethod, models, weights));

    return miningModel;
}
Example #2
Source File: RangerConverter.java From jpmml-r with GNU Affero General Public License v3.0
private MiningModel encodeRegression(RGenericVector ranger, Schema schema){
    RGenericVector forest = ranger.getGenericElement("forest");

    ScoreEncoder scoreEncoder = new ScoreEncoder(){

        @Override
        public Node encode(Node node, Number splitValue, RNumberVector<?> terminalClassCount){
            node.setScore(splitValue);

            return node;
        }
    };

    List<TreeModel> treeModels = encodeForest(forest, MiningFunction.REGRESSION, scoreEncoder, schema);

    MiningModel miningModel = new MiningModel(MiningFunction.REGRESSION, ModelUtil.createMiningSchema(schema.getLabel()))
        .setSegmentation(MiningModelUtil.createSegmentation(Segmentation.MultipleModelMethod.AVERAGE, treeModels));

    return miningModel;
}
Example #3
Source File: BoostingConverter.java From jpmml-r with GNU Affero General Public License v3.0
@Override
public Model encodeModel(Schema schema){
    RGenericVector boosting = getObject();

    RGenericVector trees = boosting.getGenericElement("trees");
    RDoubleVector weights = boosting.getDoubleElement("weights");

    CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

    List<TreeModel> treeModels = encodeTreeModels(trees);

    MiningModel miningModel = new MiningModel(MiningFunction.CLASSIFICATION, ModelUtil.createMiningSchema(categoricalLabel))
        .setSegmentation(MiningModelUtil.createSegmentation(Segmentation.MultipleModelMethod.WEIGHTED_MAJORITY_VOTE, treeModels, weights.getValues()))
        .setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel));

    return miningModel;
}
Example #4
Source File: RPartConverter.java From jpmml-r with GNU Affero General Public License v3.0
private TreeModel encodeRegression(RGenericVector frame, RIntegerVector rowNames, RIntegerVector var, RIntegerVector n, int[][] splitInfo, RNumberVector<?> splits, RIntegerVector csplit, Schema schema){
    RNumberVector<?> yval = frame.getNumericElement("yval");

    ScoreEncoder scoreEncoder = new ScoreEncoder(){

        @Override
        public Node encode(Node node, int offset){
            Number score = yval.getValue(offset);
            Number recordCount = n.getValue(offset);

            node
                .setScore(score)
                .setRecordCount(recordCount);

            return node;
        }
    };

    Node root = encodeNode(True.INSTANCE, 1, rowNames, var, n, splitInfo, splits, csplit, scoreEncoder, schema);

    TreeModel treeModel = new TreeModel(MiningFunction.REGRESSION, ModelUtil.createMiningSchema(schema.getLabel()), root);

    return configureTreeModel(treeModel);
}
Example #5
Source File: RandomForestConverter.java From jpmml-r with GNU Affero General Public License v3.0
private <P extends Number> TreeModel encodeTreeModel(MiningFunction miningFunction, ScoreEncoder<P> scoreEncoder, List<? extends Number> leftDaughter, List<? extends Number> rightDaughter, List<P> nodepred, List<? extends Number> bestvar, List<Double> xbestsplit, Schema schema){
    RGenericVector randomForest = getObject();

    Node root = encodeNode(True.INSTANCE, 0, scoreEncoder, leftDaughter, rightDaughter, bestvar, xbestsplit, nodepred, new CategoryManager(), schema);

    TreeModel treeModel = new TreeModel(miningFunction, ModelUtil.createMiningSchema(schema.getLabel()), root)
        .setMissingValueStrategy(TreeModel.MissingValueStrategy.NULL_PREDICTION)
        .setSplitCharacteristic(TreeModel.SplitCharacteristic.BINARY_SPLIT);

    if(this.compact){
        Visitor visitor = new RandomForestCompactor();

        visitor.applyTo(treeModel);
    }

    return treeModel;
}
Example #6
Source File: MiningModelUtil.java From pyramid with Apache License 2.0
static public MiningModel createModelChain(List<? extends Model> models, Schema schema){
    if(models.size() < 1){
        throw new IllegalArgumentException();
    }

    Segmentation segmentation = createSegmentation(Segmentation.MultipleModelMethod.MODEL_CHAIN, models);

    Model lastModel = Iterables.getLast(models);

    MiningModel miningModel = new MiningModel(lastModel.getMiningFunction(), ModelUtil.createMiningSchema(schema.getLabel()))
        .setMathContext(ModelUtil.simplifyMathContext(lastModel.getMathContext()))
        .setSegmentation(segmentation);

    return miningModel;
}
Example #7
Source File: LinearSVCModelConverter.java From jpmml-sparkml with GNU Affero General Public License v3.0
@Override
public MiningModel encodeModel(Schema schema){
    LinearSVCModel model = getTransformer();

    Transformation transformation = new AbstractTransformation(){

        @Override
        public Expression createExpression(FieldRef fieldRef){
            return PMMLUtil.createApply(PMMLFunctions.THRESHOLD)
                .addExpressions(fieldRef, PMMLUtil.createConstant(model.getThreshold()));
        }
    };

    Schema segmentSchema = schema.toAnonymousRegressorSchema(DataType.DOUBLE);

    Model linearModel = LinearModelUtil.createRegression(this, model.coefficients(), model.intercept(), segmentSchema)
        .setOutput(ModelUtil.createPredictedOutput(FieldName.create("margin"), OpType.CONTINUOUS, DataType.DOUBLE, transformation));

    return MiningModelUtil.createBinaryLogisticClassification(linearModel, 1d, 0d, RegressionModel.NormalizationMethod.NONE, false, schema);
}
Example #8
Source File: AdaConverter.java From jpmml-r with GNU Affero General Public License v3.0
@Override
public Model encodeModel(Schema schema){
    RGenericVector ada = getObject();

    RGenericVector model = ada.getGenericElement("model");

    RGenericVector trees = model.getGenericElement("trees");
    RDoubleVector alpha = model.getDoubleElement("alpha");

    List<TreeModel> treeModels = encodeTreeModels(trees);

    MiningModel miningModel = new MiningModel(MiningFunction.REGRESSION, ModelUtil.createMiningSchema(null))
        .setSegmentation(MiningModelUtil.createSegmentation(Segmentation.MultipleModelMethod.WEIGHTED_SUM, treeModels, alpha.getValues()))
        .setOutput(ModelUtil.createPredictedOutput(FieldName.create("adaValue"), OpType.CONTINUOUS, DataType.DOUBLE));

    return MiningModelUtil.createBinaryLogisticClassification(miningModel, 2d, 0d, RegressionModel.NormalizationMethod.LOGIT, true, schema);
}
Example #9
Source File: GeneralizedLinearRegressionModelConverter.java From jpmml-sparkml with GNU Affero General Public License v3.0
@Override
public List<OutputField> registerOutputFields(Label label, Model pmmlModel, SparkMLEncoder encoder){
    GeneralizedLinearRegressionModel model = getTransformer();

    List<OutputField> result = super.registerOutputFields(label, pmmlModel, encoder);

    MiningFunction miningFunction = getMiningFunction();
    switch(miningFunction){
        case CLASSIFICATION:
            CategoricalLabel categoricalLabel = (CategoricalLabel)label;

            result = new ArrayList<>(result);
            result.addAll(ModelUtil.createProbabilityFields(DataType.DOUBLE, categoricalLabel.getValues()));
            break;
        default:
            break;
    }

    return result;
}
Example #10
Source File: GBDTLRClassifier.java From jpmml-sklearn with GNU Affero General Public License v3.0
@Override
public Model encodeModel(Schema schema){
    Classifier gbdt = getGBDT();
    MultiOneHotEncoder ohe = getOHE();
    LinearClassifier lr = getLR();

    CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

    SchemaUtil.checkSize(2, categoricalLabel);

    List<? extends Number> coef = lr.getCoef();
    List<? extends Number> intercept = lr.getIntercept();

    Schema segmentSchema = schema.toAnonymousSchema();

    MiningModel miningModel = GBDTUtil.encodeModel(gbdt, ohe, coef, Iterables.getOnlyElement(intercept), segmentSchema)
        .setOutput(ModelUtil.createPredictedOutput(FieldName.create("decisionFunction"), OpType.CONTINUOUS, DataType.DOUBLE));

    return MiningModelUtil.createBinaryLogisticClassification(miningModel, 1d, 0d, RegressionModel.NormalizationMethod.LOGIT, lr.hasProbabilityDistribution(), schema);
}
Example #11
Source File: MultinomialLogisticRegression.java From jpmml-xgboost with GNU Affero General Public License v3.0
@Override
public MiningModel encodeMiningModel(List<RegTree> trees, List<Float> weights, float base_score, Integer ntreeLimit, Schema schema){
    Schema segmentSchema = schema.toAnonymousRegressorSchema(DataType.FLOAT);

    List<MiningModel> miningModels = new ArrayList<>();

    CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

    for(int i = 0, columns = categoricalLabel.size(), rows = (trees.size() / columns); i < columns; i++){
        MiningModel miningModel = createMiningModel(CMatrixUtil.getColumn(trees, rows, columns, i), (weights != null) ? CMatrixUtil.getColumn(weights, rows, columns, i) : null, base_score, ntreeLimit, segmentSchema)
            .setOutput(ModelUtil.createPredictedOutput(FieldName.create("xgbValue(" + categoricalLabel.getValue(i) + ")"), OpType.CONTINUOUS, DataType.FLOAT));

        miningModels.add(miningModel);
    }

    return MiningModelUtil.createClassification(miningModels, RegressionModel.NormalizationMethod.SOFTMAX, true, schema);
}
Example #12
Source File: KMeansModelConverter.java From jpmml-sparkml with GNU Affero General Public License v3.0
@Override
public ClusteringModel encodeModel(Schema schema){
    KMeansModel model = getTransformer();

    List<Cluster> clusters = new ArrayList<>();

    Vector[] clusterCenters = model.clusterCenters();
    for(int i = 0; i < clusterCenters.length; i++){
        Cluster cluster = new Cluster(PMMLUtil.createRealArray(VectorUtil.toList(clusterCenters[i])))
            .setId(String.valueOf(i));

        clusters.add(cluster);
    }

    ComparisonMeasure comparisonMeasure = new ComparisonMeasure(ComparisonMeasure.Kind.DISTANCE, new SquaredEuclidean())
        .setCompareFunction(CompareFunction.ABS_DIFF);

    return new ClusteringModel(MiningFunction.CLUSTERING, ClusteringModel.ModelClass.CENTER_BASED, clusters.size(), ModelUtil.createMiningSchema(schema.getLabel()), comparisonMeasure, ClusteringModelUtil.createClusteringFields(schema.getFeatures()), clusters);
}
Example #13
Source File: TreeUtil.java From jpmml-sklearn with GNU Affero General Public License v3.0
static public <E extends Estimator & HasTree> TreeModel encodeTreeModel(E estimator, PredicateManager predicateManager, ScoreDistributionManager scoreDistributionManager, MiningFunction miningFunction, Schema schema){
    Tree tree = estimator.getTree();

    int[] leftChildren = tree.getChildrenLeft();
    int[] rightChildren = tree.getChildrenRight();
    int[] features = tree.getFeature();
    double[] thresholds = tree.getThreshold();
    double[] values = tree.getValues();

    Node root = encodeNode(True.INSTANCE, predicateManager, scoreDistributionManager, 0, leftChildren, rightChildren, features, thresholds, values, miningFunction, schema);

    TreeModel treeModel = new TreeModel(miningFunction, ModelUtil.createMiningSchema(schema.getLabel()), root)
        .setSplitCharacteristic(TreeModel.SplitCharacteristic.BINARY_SPLIT);

    ClassDictUtil.clearContent(tree);

    return treeModel;
}
Example #14
Source File: TreePredictorUtil.java From jpmml-sklearn with GNU Affero General Public License v3.0
static public TreeModel encodeTreeModel(TreePredictor treePredictor, PredicateManager predicateManager, Schema schema){
    int[] leaf = treePredictor.isLeaf();
    int[] leftChildren = treePredictor.getLeft();
    int[] rightChildren = treePredictor.getRight();
    int[] featureIdx = treePredictor.getFeatureIdx();
    double[] thresholds = treePredictor.getThreshold();
    int[] missingGoToLeft = treePredictor.getMissingGoToLeft();
    double[] values = treePredictor.getValues();

    Node root = encodeNode(True.INSTANCE, predicateManager, 0, leaf, leftChildren, rightChildren, featureIdx, thresholds, missingGoToLeft, values, schema);

    TreeModel treeModel = new TreeModel(MiningFunction.REGRESSION, ModelUtil.createMiningSchema(schema.getLabel()), root)
        .setSplitCharacteristic(TreeModel.SplitCharacteristic.BINARY_SPLIT)
        .setMissingValueStrategy(TreeModel.MissingValueStrategy.DEFAULT_CHILD);

    return treeModel;
}
Example #15
Source File: AdaBoostRegressor.java From jpmml-sklearn with GNU Affero General Public License v3.0
@Override
public MiningModel encodeModel(Schema schema){
    List<? extends Regressor> estimators = getEstimators();
    List<? extends Number> estimatorWeights = getEstimatorWeights();

    Schema segmentSchema = schema.toAnonymousSchema();

    List<Model> models = new ArrayList<>();

    for(Regressor estimator : estimators){
        Model model = estimator.encodeModel(segmentSchema);

        models.add(model);
    }

    MiningModel miningModel = new MiningModel(MiningFunction.REGRESSION, ModelUtil.createMiningSchema(schema.getLabel()))
        .setSegmentation(MiningModelUtil.createSegmentation(MultipleModelMethod.WEIGHTED_MEDIAN, models, estimatorWeights));

    return miningModel;
}
Example #16
Source File: BaggingClassifier.java From jpmml-sklearn with GNU Affero General Public License v3.0
@Override
public MiningModel encodeModel(Schema schema){
    List<? extends Classifier> estimators = getEstimators();
    List<List<Integer>> estimatorsFeatures = getEstimatorsFeatures();

    Segmentation.MultipleModelMethod multipleModelMethod = Segmentation.MultipleModelMethod.AVERAGE;

    for(Classifier estimator : estimators){
        if(!estimator.hasProbabilityDistribution()){
            multipleModelMethod = Segmentation.MultipleModelMethod.MAJORITY_VOTE;

            break;
        }
    }

    MiningModel miningModel = BaggingUtil.encodeBagging(estimators, estimatorsFeatures, multipleModelMethod, MiningFunction.CLASSIFICATION, schema)
        .setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, (CategoricalLabel)schema.getLabel()));

    return miningModel;
}
Example #17
Source File: MultinomialLogisticRegression.java From jpmml-lightgbm with GNU Affero General Public License v3.0
@Override
public MiningModel encodeMiningModel(List<Tree> trees, Integer numIteration, Schema schema){
    Schema segmentSchema = schema.toAnonymousRegressorSchema(DataType.DOUBLE);

    List<MiningModel> miningModels = new ArrayList<>();

    CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

    for(int i = 0, rows = categoricalLabel.size(), columns = (trees.size() / rows); i < rows; i++){
        MiningModel miningModel = createMiningModel(FortranMatrixUtil.getRow(trees, rows, columns, i), numIteration, segmentSchema)
            .setOutput(ModelUtil.createPredictedOutput(FieldName.create("lgbmValue(" + categoricalLabel.getValue(i) + ")"), OpType.CONTINUOUS, DataType.DOUBLE));

        miningModels.add(miningModel);
    }

    return MiningModelUtil.createClassification(miningModels, RegressionModel.NormalizationMethod.SOFTMAX, true, schema);
}
Example #18
Source File: RangerConverter.java From jpmml-r with GNU Affero General Public License v3.0
private TreeModel encodeTreeModel(MiningFunction miningFunction, ScoreEncoder scoreEncoder, RGenericVector childNodeIDs, RNumberVector<?> splitVarIDs, RNumberVector<?> splitValues, RGenericVector terminalClassCounts, Schema schema){
    RNumberVector<?> leftChildIDs = (RNumberVector<?>)childNodeIDs.getValue(0);
    RNumberVector<?> rightChildIDs = (RNumberVector<?>)childNodeIDs.getValue(1);

    Node root = encodeNode(True.INSTANCE, 0, scoreEncoder, leftChildIDs, rightChildIDs, splitVarIDs, splitValues, terminalClassCounts, new CategoryManager(), schema);

    TreeModel treeModel = new TreeModel(miningFunction, ModelUtil.createMiningSchema(schema.getLabel()), root)
        .setSplitCharacteristic(TreeModel.SplitCharacteristic.BINARY_SPLIT);

    return treeModel;
}
Example #19
Source File: RuleSetClassifier.java From jpmml-sklearn with GNU Affero General Public License v3.0
@Override
public RuleSetModel encodeModel(Schema schema){
    String defaultScore = getDefaultScore();
    List<Object[]> rules = getRules();

    Label label = schema.getLabel();
    List<? extends Feature> features = schema.getFeatures();

    RuleSelectionMethod ruleSelectionMethod = new RuleSelectionMethod(RuleSelectionMethod.Criterion.FIRST_HIT);

    RuleSet ruleSet = new RuleSet()
        .addRuleSelectionMethods(ruleSelectionMethod);

    if(defaultScore != null){
        ruleSet
            .setDefaultConfidence(1d)
            .setDefaultScore(defaultScore);
    }

    Scope scope = new DataFrameScope(FieldName.create("X"), features);

    for(Object[] rule : rules){
        String predicate = TupleUtil.extractElement(rule, 0, String.class);
        String score = TupleUtil.extractElement(rule, 1, String.class);

        Predicate pmmlPredicate = PredicateTranslator.translate(predicate, scope);

        SimpleRule simpleRule = new SimpleRule(score, pmmlPredicate);

        ruleSet.addRules(simpleRule);
    }

    RuleSetModel ruleSetModel = new RuleSetModel(MiningFunction.CLASSIFICATION, ModelUtil.createMiningSchema(label), ruleSet);

    return ruleSetModel;
}
Example #20
Source File: FishNetConverter.java From jpmml-r with GNU Affero General Public License v3.0
@Override
public Model encodeModel(RDoubleVector a0, RExp beta, int column, Schema schema){
    Double intercept = a0.getValue(column);
    List<Double> coefficients = getCoefficients((S4Object)beta, column);

    GeneralRegressionModel generalRegressionModel = new GeneralRegressionModel(GeneralRegressionModel.ModelType.GENERAL_LINEAR, MiningFunction.REGRESSION, ModelUtil.createMiningSchema(schema.getLabel()), null, null, null)
        .setDistribution(GeneralRegressionModel.Distribution.POISSON);

    GeneralRegressionModelUtil.encodeRegressionTable(generalRegressionModel, schema.getFeatures(), coefficients, intercept, null);

    return generalRegressionModel;
}
Example #21
Source File: MVRConverter.java From jpmml-r with GNU Affero General Public License v3.0
@Override
public GeneralRegressionModel encodeModel(Schema schema){
    RGenericVector mvr = getObject();

    RDoubleVector coefficients = mvr.getDoubleElement("coefficients");
    RDoubleVector xMeans = mvr.getDoubleElement("Xmeans");
    RDoubleVector yMeans = mvr.getDoubleElement("Ymeans");
    RNumberVector<?> ncomp = mvr.getNumericElement("ncomp");

    RStringVector rowNames = coefficients.dimnames(0);
    RStringVector columnNames = coefficients.dimnames(1);
    RStringVector compNames = coefficients.dimnames(2);

    int rows = rowNames.size();
    int columns = columnNames.size();
    int components = compNames.size();

    List<? extends Feature> features = schema.getFeatures();

    List<Double> featureCoefficients = FortranMatrixUtil.getColumn(coefficients.getValues(), rows, (columns * components), 0 + (ValueUtil.asInt(ncomp.asScalar()) - 1));

    Double intercept = yMeans.getValue(0);

    for(int j = 0; j < rowNames.size(); j++){
        intercept -= (featureCoefficients.get(j) * xMeans.getValue(j));
    }

    GeneralRegressionModel generalRegressionModel = new GeneralRegressionModel(GeneralRegressionModel.ModelType.GENERALIZED_LINEAR, MiningFunction.REGRESSION, ModelUtil.createMiningSchema(schema.getLabel()), null, null, null)
        .setLinkFunction(GeneralRegressionModel.LinkFunction.IDENTITY);

    GeneralRegressionModelUtil.encodeRegressionTable(generalRegressionModel, features, featureCoefficients, intercept, null);

    return generalRegressionModel;
}
Example #22
Source File: BinaryTreeConverter.java From jpmml-r with GNU Affero General Public License v3.0
private TreeModel encodeTreeModel(RGenericVector tree, Schema schema){
    Node root = encodeNode(True.INSTANCE, tree, schema);

    TreeModel treeModel = new TreeModel(this.miningFunction, ModelUtil.createMiningSchema(schema.getLabel()), root)
        .setSplitCharacteristic(TreeModel.SplitCharacteristic.BINARY_SPLIT);

    return treeModel;
}
Example #23
Source File: KMeansConverter.java From jpmml-r with GNU Affero General Public License v3.0
@Override
public Model encodeModel(Schema schema){
    RGenericVector kmeans = getObject();

    RDoubleVector centers = kmeans.getDoubleElement("centers");
    RIntegerVector size = kmeans.getIntegerElement("size");

    RIntegerVector centersDim = centers.dim();

    int rows = centersDim.getValue(0);
    int columns = centersDim.getValue(1);

    List<Cluster> clusters = new ArrayList<>();

    RStringVector rowNames = centers.dimnames(0);
    for(int i = 0; i < rowNames.size(); i++){
        Cluster cluster = new Cluster(PMMLUtil.createRealArray(FortranMatrixUtil.getRow(centers.getValues(), rows, columns, i)))
            .setId(String.valueOf(i + 1))
            .setName(rowNames.getValue(i))
            .setSize(size.getValue(i));

        clusters.add(cluster);
    }

    ComparisonMeasure comparisonMeasure = new ComparisonMeasure(ComparisonMeasure.Kind.DISTANCE, new SquaredEuclidean())
        .setCompareFunction(CompareFunction.ABS_DIFF);

    ClusteringModel clusteringModel = new ClusteringModel(MiningFunction.CLUSTERING, ClusteringModel.ModelClass.CENTER_BASED, rows, ModelUtil.createMiningSchema(schema.getLabel()), comparisonMeasure, ClusteringModelUtil.createClusteringFields(schema.getFeatures()), clusters)
        .setOutput(ClusteringModelUtil.createOutput(FieldName.create("cluster"), DataType.DOUBLE, clusters));

    return clusteringModel;
}
Example #24
Source File: IForestConverter.java From jpmml-r with GNU Affero General Public License v3.0
private TreeModel encodeTreeModel(RGenericVector trees, int index, Schema schema){
    RIntegerVector nrnodes = trees.getIntegerElement("nrnodes");
    RIntegerVector ntree = trees.getIntegerElement("ntree");
    RIntegerVector nodeStatus = trees.getIntegerElement("nodeStatus");
    RIntegerVector leftDaughter = trees.getIntegerElement("lDaughter");
    RIntegerVector rightDaughter = trees.getIntegerElement("rDaughter");
    RIntegerVector splitAtt = trees.getIntegerElement("splitAtt");
    RDoubleVector splitPoint = trees.getDoubleElement("splitPoint");
    RIntegerVector nSam = trees.getIntegerElement("nSam");

    int rows = nrnodes.asScalar();
    int columns = ntree.asScalar();

    Node root = encodeNode(
        True.INSTANCE,
        0,
        0,
        FortranMatrixUtil.getColumn(nodeStatus.getValues(), rows, columns, index),
        FortranMatrixUtil.getColumn(nSam.getValues(), rows, columns, index),
        FortranMatrixUtil.getColumn(leftDaughter.getValues(), rows, columns, index),
        FortranMatrixUtil.getColumn(rightDaughter.getValues(), rows, columns, index),
        FortranMatrixUtil.getColumn(splitAtt.getValues(), rows, columns, index),
        FortranMatrixUtil.getColumn(splitPoint.getValues(), rows, columns, index),
        schema
    );

    TreeModel treeModel = new TreeModel(MiningFunction.REGRESSION, ModelUtil.createMiningSchema(schema.getLabel()), root)
        .setSplitCharacteristic(TreeModel.SplitCharacteristic.BINARY_SPLIT);

    return treeModel;
}
Example #25
Source File: TreeClassifier.java From jpmml-sklearn with GNU Affero General Public License v3.0
@Override
public TreeModel encodeModel(Schema schema){
    TreeModel treeModel = TreeUtil.encodeTreeModel(this, MiningFunction.CLASSIFICATION, schema)
        .setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, (CategoricalLabel)schema.getLabel()));

    return TreeUtil.transform(this, treeModel);
}
Example #26
Source File: GradientBoostingUtil.java From jpmml-sklearn with GNU Affero General Public License v3.0
static public <E extends Estimator & HasEstimatorEnsemble<TreeRegressor> & HasTreeOptions> MiningModel encodeGradientBoosting(E estimator, Number initialPrediction, Number learningRate, Schema schema){
    ContinuousLabel continuousLabel = (ContinuousLabel)schema.getLabel();

    List<TreeModel> treeModels = TreeUtil.encodeTreeModelEnsemble(estimator, MiningFunction.REGRESSION, schema);

    MiningModel miningModel = new MiningModel(MiningFunction.REGRESSION, ModelUtil.createMiningSchema(continuousLabel))
        .setSegmentation(MiningModelUtil.createSegmentation(Segmentation.MultipleModelMethod.SUM, treeModels))
        .setTargets(ModelUtil.createRescaleTargets(learningRate, initialPrediction, continuousLabel));

    return TreeUtil.transform(estimator, miningModel);
}
Example #27
Source File: GBMConverter.java From jpmml-r with GNU Affero General Public License v3.0
private MiningModel encodeBinaryClassification(List<TreeModel> treeModels, Double initF, double coefficient, Schema schema){
    Schema segmentSchema = schema.toAnonymousRegressorSchema(DataType.DOUBLE);

    MiningModel miningModel = createMiningModel(treeModels, initF, segmentSchema)
        .setOutput(ModelUtil.createPredictedOutput(FieldName.create("gbmValue"), OpType.CONTINUOUS, DataType.DOUBLE));

    return MiningModelUtil.createBinaryLogisticClassification(miningModel, -coefficient, 0d, RegressionModel.NormalizationMethod.LOGIT, true, schema);
}
Example #28
Source File: ForestUtil.java From jpmml-sklearn with GNU Affero General Public License v3.0
static public <E extends Estimator & HasEstimatorEnsemble<T> & HasTreeOptions, T extends Estimator & HasTree> MiningModel encodeBaseForest(E estimator, Segmentation.MultipleModelMethod multipleModelMethod, MiningFunction miningFunction, Schema schema){
    List<TreeModel> treeModels = TreeUtil.encodeTreeModelEnsemble(estimator, miningFunction, schema);

    MiningModel miningModel = new MiningModel(miningFunction, ModelUtil.createMiningSchema(schema.getLabel()))
        .setSegmentation(MiningModelUtil.createSegmentation(multipleModelMethod, treeModels));

    return TreeUtil.transform(estimator, miningModel);
}
Example #29
Source File: GBMConverter.java From jpmml-r with GNU Affero General Public License v3.0
private TreeModel encodeTreeModel(MiningFunction miningFunction, RGenericVector tree, RGenericVector c_splits, Schema schema){
    Node root = encodeNode(True.INSTANCE, 0, tree, c_splits, new FlagManager(), new CategoryManager(), schema);

    TreeModel treeModel = new TreeModel(miningFunction, ModelUtil.createMiningSchema(schema.getLabel()), root)
        .setSplitCharacteristic(TreeModel.SplitCharacteristic.MULTI_SPLIT);

    return treeModel;
}
Example #30
Source File: OneClassSVM.java From jpmml-sklearn with GNU Affero General Public License v3.0
@Override
public SupportVectorMachineModel encodeModel(Schema schema){
    Transformation outlier = new OutlierTransformation(){

        @Override
        public Expression createExpression(FieldRef fieldRef){
            return PMMLUtil.createApply(PMMLFunctions.LESSOREQUAL, fieldRef, PMMLUtil.createConstant(0d));
        }
    };

    SupportVectorMachineModel supportVectorMachineModel = super.encodeModel(schema)
        .setOutput(ModelUtil.createPredictedOutput(FieldName.create("decisionFunction"), OpType.CONTINUOUS, DataType.DOUBLE, outlier));

    Output output = supportVectorMachineModel.getOutput();

    List<OutputField> outputFields = output.getOutputFields();
    if(outputFields.size() != 2){
        throw new IllegalArgumentException();
    }

    OutputField decisionFunctionOutputField = outputFields.get(0);

    if(!decisionFunctionOutputField.isFinalResult()){
        decisionFunctionOutputField.setFinalResult(true);
    }

    return supportVectorMachineModel;
}