org.jpmml.converter.ContinuousLabel Java Examples
The following examples show how to use org.jpmml.converter.ContinuousLabel, which represents a continuous (regression) target in the JPMML family of converter libraries. Each example is drawn from the open-source project and source file named above it.
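All of the examples below follow the same basic pattern: declare (or fetch) a continuous target field, wrap it in a ContinuousLabel, and derive the model's MiningSchema from it. A minimal sketch of that pattern, assuming `encoder` is an org.jpmml.converter.PMMLEncoder instance and "y" is a hypothetical target field name (neither is taken from any single project below):

    // Minimal sketch; the field name "y" and the `encoder` instance are
    // assumptions for illustration only.
    DataField dataField = encoder.createDataField(FieldName.create("y"), OpType.CONTINUOUS, DataType.DOUBLE);

    // Wrap the declared target field in a continuous (regression) label ...
    ContinuousLabel continuousLabel = new ContinuousLabel(dataField);

    // ... and derive the model's MiningSchema from it.
    MiningSchema miningSchema = ModelUtil.createMiningSchema(continuousLabel);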
Example #1
Source File: Regression.java From jpmml-lightgbm with GNU Affero General Public License v3.0
@Override
public Label encodeLabel(FieldName targetField, List<?> targetCategories, PMMLEncoder encoder){

    if(targetCategories != null && targetCategories.size() > 0){
        throw new IllegalArgumentException("Regression requires zero target categories");
    }

    DataField dataField = encoder.createDataField(targetField, OpType.CONTINUOUS, DataType.DOUBLE);

    return new ContinuousLabel(dataField);
}
Example #2
Source File: LinearRegressor.java From jpmml-tensorflow with GNU Affero General Public License v3.0
@Override
public RegressionModel encodeModel(TensorFlowEncoder encoder){
    DataField dataField = encoder.createDataField(FieldName.create("_target"), OpType.CONTINUOUS, DataType.FLOAT);

    Label label = new ContinuousLabel(dataField);

    RegressionModel regressionModel = encodeRegressionModel(encoder)
        .setMiningFunction(MiningFunction.REGRESSION)
        .setMiningSchema(ModelUtil.createMiningSchema(label));

    return regressionModel;
}
Example #3
Source File: Regression.java From jpmml-xgboost with GNU Affero General Public License v3.0
@Override
public Label encodeLabel(FieldName targetField, List<?> targetCategories, PMMLEncoder encoder){

    if(targetCategories != null){
        throw new IllegalArgumentException("Regression requires zero target categories");
    }

    DataField dataField = encoder.createDataField(targetField, OpType.CONTINUOUS, DataType.FLOAT);

    return new ContinuousLabel(dataField);
}
Example #4
Source File: GradientBoostingUtil.java From jpmml-sklearn with GNU Affero General Public License v3.0
static public <E extends Estimator & HasEstimatorEnsemble<TreeRegressor> & HasTreeOptions> MiningModel encodeGradientBoosting(E estimator, Number initialPrediction, Number learningRate, Schema schema){
    ContinuousLabel continuousLabel = (ContinuousLabel)schema.getLabel();

    List<TreeModel> treeModels = TreeUtil.encodeTreeModelEnsemble(estimator, MiningFunction.REGRESSION, schema);

    MiningModel miningModel = new MiningModel(MiningFunction.REGRESSION, ModelUtil.createMiningSchema(continuousLabel))
        .setSegmentation(MiningModelUtil.createSegmentation(Segmentation.MultipleModelMethod.SUM, treeModels))
        .setTargets(ModelUtil.createRescaleTargets(learningRate, initialPrediction, continuousLabel));

    return TreeUtil.transform(estimator, miningModel);
}
Example #5
Source File: SVMConverter.java From jpmml-r with GNU Affero General Public License v3.0
private void encodeFormula(RExpEncoder encoder){
    RGenericVector svm = getObject();

    RDoubleVector type = svm.getDoubleElement("type");
    RDoubleVector sv = svm.getDoubleElement("SV");
    RVector<?> levels = svm.getVectorElement("levels");
    RExp terms = svm.getElement("terms");

    RGenericVector xlevels = DecorationUtil.getGenericElement(svm, "xlevels");

    Type svmType = Type.values()[ValueUtil.asInt(type.asScalar())];

    RStringVector rowNames = sv.dimnames(0);
    RStringVector columnNames = sv.dimnames(1);

    FormulaContext context = new XLevelsFormulaContext(xlevels);

    Formula formula = FormulaUtil.createFormula(terms, context, encoder);

    switch(svmType){
        case C_CLASSIFICATION:
        case NU_CLASSIFICATION:
            FormulaUtil.setLabel(formula, terms, levels, encoder);
            break;
        case ONE_CLASSIFICATION:
            encoder.setLabel(new ContinuousLabel(null, DataType.DOUBLE));
            break;
        case EPS_REGRESSION:
        case NU_REGRESSION:
            FormulaUtil.setLabel(formula, terms, null, encoder);
            break;
    }

    FormulaUtil.addFeatures(formula, columnNames, true, encoder);

    scaleFeatures(encoder);
}
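Note the ONE_CLASSIFICATION branch above: a one-class SVM has no observed target variable, so the label is built without a backing DataField. A brief sketch of that anonymous form, as used in the example:

    // Anonymous continuous label: no field name, only a data type. Downstream
    // code can still derive a MiningSchema and model outputs from it.
    ContinuousLabel anonymousLabel = new ContinuousLabel(null, DataType.DOUBLE);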
Example #6
Source File: GBMConverter.java From jpmml-r with GNU Affero General Public License v3.0
static private MiningModel createMiningModel(List<TreeModel> treeModels, Double initF, Schema schema){
    ContinuousLabel continuousLabel = (ContinuousLabel)schema.getLabel();

    MiningModel miningModel = new MiningModel(MiningFunction.REGRESSION, ModelUtil.createMiningSchema(continuousLabel))
        .setSegmentation(MiningModelUtil.createSegmentation(Segmentation.MultipleModelMethod.SUM, treeModels))
        .setTargets(ModelUtil.createRescaleTargets(null, initF, continuousLabel));

    return miningModel;
}
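Examples #4 and #6 share the boosted-ensemble pattern: the member trees are summed, and the raw sum is then rescaled via a PMML Targets element. Judging from the call sites above, the first argument of ModelUtil.createRescaleTargets scales the prediction (null means no scaling) and the second shifts it. A sketch with hypothetical numbers:

    // Hypothetical values for illustration: report 0.1 * sum + 0.5
    // for the continuous target field.
    Targets targets = ModelUtil.createRescaleTargets(0.1d, 0.5d, continuousLabel);
    miningModel.setTargets(targets);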
Example #7
Source File: DNNRegressor.java From jpmml-tensorflow with GNU Affero General Public License v3.0
@Override
public NeuralNetwork encodeModel(TensorFlowEncoder encoder){
    DataField dataField = encoder.createDataField(FieldName.create("_target"), OpType.CONTINUOUS, DataType.FLOAT);

    NeuralNetwork neuralNetwork = encodeNeuralNetwork(encoder);

    List<NeuralLayer> neuralLayers = neuralNetwork.getNeuralLayers();

    NeuralLayer neuralLayer = Iterables.getLast(neuralLayers);
    neuralLayer.setActivationFunction(NeuralNetwork.ActivationFunction.IDENTITY);

    List<Neuron> neurons = neuralLayer.getNeurons();

    ContinuousLabel continuousLabel = new ContinuousLabel(dataField);

    neuralNetwork
        .setMiningFunction(MiningFunction.REGRESSION)
        .setMiningSchema(ModelUtil.createMiningSchema(continuousLabel))
        .setNeuralOutputs(NeuralNetworkUtil.createRegressionNeuralOutputs(neurons, continuousLabel));

    return neuralNetwork;
}
Example #8
Source File: LinearModelUtil.java From jpmml-sparkml with GNU Affero General Public License v3.0
static public <C extends ModelConverter<?> & HasRegressionTableOptions> Model createRegression(C converter, Vector coefficients, double intercept, Schema schema){
    ContinuousLabel continuousLabel = (ContinuousLabel)schema.getLabel();

    String representation = (String)converter.getOption(HasRegressionTableOptions.OPTION_REPRESENTATION, null);

    List<Feature> features = new ArrayList<>(schema.getFeatures());
    List<Double> featureCoefficients = new ArrayList<>(VectorUtil.toList(coefficients));

    RegressionTableUtil.simplify(converter, null, features, featureCoefficients);

    if(representation != null && (GeneralRegressionModel.class.getSimpleName()).equalsIgnoreCase(representation)){
        GeneralRegressionModel generalRegressionModel = new GeneralRegressionModel(GeneralRegressionModel.ModelType.REGRESSION, MiningFunction.REGRESSION, ModelUtil.createMiningSchema(continuousLabel), null, null, null);

        GeneralRegressionModelUtil.encodeRegressionTable(generalRegressionModel, features, featureCoefficients, intercept, null);

        return generalRegressionModel;
    }

    return RegressionModelUtil.createRegression(features, featureCoefficients, intercept, NormalizationMethod.NONE, schema);
}
Example #9
Source File: ObjFunction.java From jpmml-xgboost with GNU Affero General Public License v3.0
static protected MiningModel createMiningModel(List<RegTree> trees, List<Float> weights, float base_score, Integer ntreeLimit, Schema schema){

    if(weights != null){

        if(trees.size() != weights.size()){
            throw new IllegalArgumentException();
        }
    } // End if

    if(ntreeLimit != null){

        if(ntreeLimit > trees.size()){
            throw new IllegalArgumentException("Tree limit " + ntreeLimit + " is greater than the number of trees");
        }

        trees = trees.subList(0, ntreeLimit);

        if(weights != null){
            weights = weights.subList(0, ntreeLimit);
        }
    } // End if

    if(weights != null){
        weights = new ArrayList<>(weights);
    }

    ContinuousLabel continuousLabel = (ContinuousLabel)schema.getLabel();

    Schema segmentSchema = schema.toAnonymousSchema();

    PredicateManager predicateManager = new PredicateManager();

    List<TreeModel> treeModels = new ArrayList<>();

    boolean equalWeights = true;

    Iterator<RegTree> treeIt = trees.iterator();
    Iterator<Float> weightIt = (weights != null ? weights.iterator() : null);

    while(treeIt.hasNext()){
        RegTree tree = treeIt.next();
        Float weight = (weightIt != null ? weightIt.next() : null);

        if(tree.isEmpty()){
            weightIt.remove();

            continue;
        } // End if

        if(weight != null){
            equalWeights &= ValueUtil.isOne(weight);
        }

        TreeModel treeModel = tree.encodeTreeModel(predicateManager, segmentSchema);

        treeModels.add(treeModel);
    }

    MiningModel miningModel = new MiningModel(MiningFunction.REGRESSION, ModelUtil.createMiningSchema(continuousLabel))
        .setMathContext(MathContext.FLOAT)
        .setSegmentation(MiningModelUtil.createSegmentation(equalWeights ? Segmentation.MultipleModelMethod.SUM : Segmentation.MultipleModelMethod.WEIGHTED_SUM, treeModels, weights))
        .setTargets(ModelUtil.createRescaleTargets(null, base_score, continuousLabel));

    return miningModel;
}
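The converter above also downgrades WEIGHTED_SUM to a plain SUM when every tree weight equals one. The two segmentation forms it chooses between, reusing the names from the example (the two-argument overload appears in Examples #4, #6 and #10):

    // All weights equal to one: a plain sum is equivalent and simpler.
    Segmentation sum = MiningModelUtil.createSegmentation(Segmentation.MultipleModelMethod.SUM, treeModels);

    // Otherwise: a weighted sum, with one Float weight per tree model.
    Segmentation weightedSum = MiningModelUtil.createSegmentation(Segmentation.MultipleModelMethod.WEIGHTED_SUM, treeModels, weights);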
Example #10
Source File: HistGradientBoostingUtil.java From jpmml-sklearn with GNU Affero General Public License v3.0
static public MiningModel encodeHistGradientBoosting(List<TreePredictor> treePredictors, Number baselinePrediction, Schema schema){
    ContinuousLabel continuousLabel = (ContinuousLabel)schema.getLabel();

    PredicateManager predicateManager = new PredicateManager();

    Schema segmentSchema = schema.toAnonymousRegressorSchema(DataType.DOUBLE);

    List<TreeModel> treeModels = new ArrayList<>();

    for(TreePredictor treePredictor : treePredictors){
        TreeModel treeModel = TreePredictorUtil.encodeTreeModel(treePredictor, predicateManager, segmentSchema);

        treeModels.add(treeModel);
    }

    MiningModel miningModel = new MiningModel(MiningFunction.REGRESSION, ModelUtil.createMiningSchema(continuousLabel))
        .setSegmentation(MiningModelUtil.createSegmentation(Segmentation.MultipleModelMethod.SUM, treeModels))
        .setTargets(ModelUtil.createRescaleTargets(null, baselinePrediction, continuousLabel));

    return miningModel;
}
Example #11
Source File: MultilayerPerceptronUtil.java From jpmml-sklearn with GNU Affero General Public License v3.0
static public NeuralNetwork encodeNeuralNetwork(MiningFunction miningFunction, String activation, List<? extends HasArray> coefs, List<? extends HasArray> intercepts, Schema schema){
    NeuralNetwork.ActivationFunction activationFunction = parseActivationFunction(activation);

    ClassDictUtil.checkSize(coefs, intercepts);

    Label label = schema.getLabel();
    List<? extends Feature> features = schema.getFeatures();

    NeuralInputs neuralInputs = NeuralNetworkUtil.createNeuralInputs(features, DataType.DOUBLE);

    List<? extends NeuralEntity> entities = neuralInputs.getNeuralInputs();

    List<NeuralLayer> neuralLayers = new ArrayList<>();

    for(int layer = 0; layer < coefs.size(); layer++){
        HasArray coef = coefs.get(layer);
        HasArray intercept = intercepts.get(layer);

        int[] shape = coef.getArrayShape();

        int rows = shape[0];
        int columns = shape[1];

        NeuralLayer neuralLayer = new NeuralLayer();

        List<?> coefMatrix = coef.getArrayContent();
        List<?> interceptVector = intercept.getArrayContent();

        for(int column = 0; column < columns; column++){
            List<? extends Number> weights = (List)CMatrixUtil.getColumn(coefMatrix, rows, columns, column);
            Number bias = (Number)interceptVector.get(column);

            Neuron neuron = NeuralNetworkUtil.createNeuron(entities, weights, bias)
                .setId(String.valueOf(layer + 1) + "/" + String.valueOf(column + 1));

            neuralLayer.addNeurons(neuron);
        }

        neuralLayers.add(neuralLayer);

        entities = neuralLayer.getNeurons();

        if(layer == (coefs.size() - 1)){
            neuralLayer.setActivationFunction(NeuralNetwork.ActivationFunction.IDENTITY);

            switch(miningFunction){
                case REGRESSION:
                    break;
                case CLASSIFICATION:
                    CategoricalLabel categoricalLabel = (CategoricalLabel)label;

                    // Binary classification
                    if(categoricalLabel.size() == 2){
                        List<NeuralLayer> transformationNeuralLayers = NeuralNetworkUtil.createBinaryLogisticTransformation(Iterables.getOnlyElement(entities));

                        neuralLayers.addAll(transformationNeuralLayers);

                        neuralLayer = Iterables.getLast(transformationNeuralLayers);

                        entities = neuralLayer.getNeurons();
                    } else

                    // Multi-class classification
                    if(categoricalLabel.size() > 2){
                        neuralLayer.setNormalizationMethod(NeuralNetwork.NormalizationMethod.SOFTMAX);
                    } else {
                        throw new IllegalArgumentException();
                    }
                    break;
                default:
                    break;
            }
        }
    }

    NeuralOutputs neuralOutputs = null;

    switch(miningFunction){
        case REGRESSION:
            neuralOutputs = NeuralNetworkUtil.createRegressionNeuralOutputs(entities, (ContinuousLabel)label);
            break;
        case CLASSIFICATION:
            neuralOutputs = NeuralNetworkUtil.createClassificationNeuralOutputs(entities, (CategoricalLabel)label);
            break;
        default:
            break;
    }

    NeuralNetwork neuralNetwork = new NeuralNetwork(miningFunction, activationFunction, ModelUtil.createMiningSchema(label), neuralInputs, neuralLayers)
        .setNeuralOutputs(neuralOutputs);

    return neuralNetwork;
}
Example #12
Source File: NNConverter.java From jpmml-r with GNU Affero General Public License v3.0
@Override
public Model encodeModel(Schema schema){
    RGenericVector nn = getObject();

    RExp actFct = nn.getElement("act.fct");
    RBooleanVector linearOutput = nn.getBooleanElement("linear.output");
    RGenericVector weights = nn.getGenericElement("weights");

    RStringVector actFctType = actFct.getStringAttribute("type");

    // Select the first repetition
    weights = (RGenericVector)weights.getValue(0);

    NeuralNetwork.ActivationFunction activationFunction = NeuralNetwork.ActivationFunction.LOGISTIC;

    switch(actFctType.asScalar()){
        case "logistic":
            activationFunction = NeuralNetwork.ActivationFunction.LOGISTIC;
            break;
        case "tanh":
            activationFunction = NeuralNetwork.ActivationFunction.TANH;
            break;
        default:
            throw new IllegalArgumentException();
    }

    ContinuousLabel continuousLabel = (ContinuousLabel)schema.getLabel();

    List<? extends Feature> features = schema.getFeatures();

    NeuralInputs neuralInputs = NeuralNetworkUtil.createNeuralInputs(features, DataType.DOUBLE);

    List<NeuralLayer> neuralLayers = new ArrayList<>();

    List<? extends NeuralEntity> entities = neuralInputs.getNeuralInputs();

    for(int i = 0; i < weights.size(); i++){
        boolean hidden = (i < (weights.size() - 1));

        NeuralLayer neuralLayer = new NeuralLayer();

        if(hidden || (linearOutput != null && !linearOutput.asScalar())){
            neuralLayer.setActivationFunction(activationFunction);
        }

        RDoubleVector layerWeights = (RDoubleVector)weights.getValue(i);

        RIntegerVector layerDim = layerWeights.dim();

        int layerRows = layerDim.getValue(0);
        int layerColumns = layerDim.getValue(1);

        for(int j = 0; j < layerColumns; j++){
            List<Double> neuronWeights = FortranMatrixUtil.getColumn(layerWeights.getValues(), layerRows, layerColumns, j);

            String id;

            if(hidden){
                id = "hidden/" + String.valueOf(i) + "/" + String.valueOf(j);
            } else {
                id = "output/" + String.valueOf(j);
            }

            Neuron neuron = NeuralNetworkUtil.createNeuron(entities, neuronWeights.subList(1, neuronWeights.size()), neuronWeights.get(0))
                .setId(id);

            neuralLayer.addNeurons(neuron);
        }

        neuralLayers.add(neuralLayer);

        entities = neuralLayer.getNeurons();
    }

    NeuralNetwork neuralNetwork = new NeuralNetwork(MiningFunction.REGRESSION, NeuralNetwork.ActivationFunction.IDENTITY, ModelUtil.createMiningSchema(continuousLabel), neuralInputs, neuralLayers)
        .setNeuralOutputs(NeuralNetworkUtil.createRegressionNeuralOutputs(entities, continuousLabel));

    return neuralNetwork;
}
Example #13
Source File: SVMConverter.java From jpmml-r with GNU Affero General Public License v3.0
@Override
public SupportVectorMachineModel encodeModel(Schema schema){
    RGenericVector svm = getObject();

    RDoubleVector type = svm.getDoubleElement("type");
    RDoubleVector kernel = svm.getDoubleElement("kernel");
    RDoubleVector degree = svm.getDoubleElement("degree");
    RDoubleVector gamma = svm.getDoubleElement("gamma");
    RDoubleVector coef0 = svm.getDoubleElement("coef0");
    RGenericVector yScale = svm.getGenericElement("y.scale");
    RIntegerVector nSv = svm.getIntegerElement("nSV");
    RDoubleVector sv = svm.getDoubleElement("SV");
    RDoubleVector rho = svm.getDoubleElement("rho");
    RDoubleVector coefs = svm.getDoubleElement("coefs");

    Type svmType = Type.values()[ValueUtil.asInt(type.asScalar())];
    Kernel svmKernel = Kernel.values()[ValueUtil.asInt(kernel.asScalar())];

    org.dmg.pmml.support_vector_machine.Kernel pmmlKernel = svmKernel.createKernel(degree.asScalar(), gamma.asScalar(), coef0.asScalar());

    SupportVectorMachineModel supportVectorMachineModel;

    switch(svmType){
        case C_CLASSIFICATION:
        case NU_CLASSIFICATION:
            {
                supportVectorMachineModel = encodeClassification(pmmlKernel, sv, nSv, rho, coefs, schema);
            }
            break;
        case ONE_CLASSIFICATION:
            {
                Transformation outlier = new OutlierTransformation(){

                    @Override
                    public Expression createExpression(FieldRef fieldRef){
                        return PMMLUtil.createApply(PMMLFunctions.LESSOREQUAL, fieldRef, PMMLUtil.createConstant(0d));
                    }
                };

                supportVectorMachineModel = encodeRegression(pmmlKernel, sv, rho, coefs, schema)
                    .setOutput(ModelUtil.createPredictedOutput(FieldName.create("decisionFunction"), OpType.CONTINUOUS, DataType.DOUBLE, outlier));

                if(yScale != null && yScale.size() > 0){
                    throw new IllegalArgumentException();
                }
            }
            break;
        case EPS_REGRESSION:
        case NU_REGRESSION:
            {
                supportVectorMachineModel = encodeRegression(pmmlKernel, sv, rho, coefs, schema);

                if(yScale != null && yScale.size() > 0){
                    RDoubleVector yScaledCenter = yScale.getDoubleElement("scaled:center");
                    RDoubleVector yScaledScale = yScale.getDoubleElement("scaled:scale");

                    supportVectorMachineModel.setTargets(ModelUtil.createRescaleTargets(-1d * yScaledScale.asScalar(), yScaledCenter.asScalar(), (ContinuousLabel)schema.getLabel()));
                }
            }
            break;
        default:
            throw new IllegalArgumentException();
    }

    return supportVectorMachineModel;
}
Example #14
Source File: ObjectiveFunction.java From jpmml-lightgbm with GNU Affero General Public License v3.0
protected MiningModel createMiningModel(List<Tree> trees, Integer numIteration, Schema schema){
    ContinuousLabel continuousLabel = (ContinuousLabel)schema.getLabel();

    Schema segmentSchema = schema.toAnonymousSchema();

    PredicateManager predicateManager = new PredicateManager();

    List<TreeModel> treeModels = new ArrayList<>();

    if(numIteration != null){

        if(numIteration > trees.size()){
            throw new IllegalArgumentException("Tree limit " + numIteration + " is greater than the number of trees");
        }

        trees = trees.subList(0, numIteration);
    }

    for(Tree tree : trees){
        TreeModel treeModel = tree.encodeTreeModel(predicateManager, segmentSchema);

        treeModels.add(treeModel);
    }

    MiningModel miningModel = new MiningModel(MiningFunction.REGRESSION, ModelUtil.createMiningSchema(continuousLabel))
        .setSegmentation(MiningModelUtil.createSegmentation(this.average_output_ ? Segmentation.MultipleModelMethod.AVERAGE : Segmentation.MultipleModelMethod.SUM, treeModels));

    return miningModel;
}
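Here LightGBM's average_output_ flag decides whether the member trees are averaged (random-forest style) or summed (boosting style); the difference is only the MultipleModelMethod passed to the segmentation:

    // average_output_ == true: predictions of the member trees are averaged.
    Segmentation average = MiningModelUtil.createSegmentation(Segmentation.MultipleModelMethod.AVERAGE, treeModels);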