org.jpmml.converter.CMatrixUtil Java Examples
The following examples show how to use
org.jpmml.converter.CMatrixUtil.
You can vote up the examples you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You can also check out the related API usage on the sidebar.
Example #1
Source File: MultinomialLogisticRegression.java From jpmml-xgboost with GNU Affero General Public License v3.0 | 6 votes |
/**
 * Encodes a multi-class XGBoost booster as a PMML mining model.
 *
 * The flat tree list interleaves classes: tree k belongs to class (k mod numClasses).
 * CMatrixUtil.getColumn() extracts the per-class tree sequence, each of which becomes
 * a regression sub-model; the per-class scores are combined via softmax normalization.
 */
@Override
public MiningModel encodeMiningModel(List<RegTree> trees, List<Float> weights, float base_score, Integer ntreeLimit, Schema schema){
	CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

	Schema segmentSchema = schema.toAnonymousRegressorSchema(DataType.FLOAT);

	int numClasses = categoricalLabel.size();
	int numRounds = (trees.size() / numClasses);

	List<MiningModel> classModels = new ArrayList<>();

	for(int classIndex = 0; classIndex < numClasses; classIndex++){
		List<RegTree> classTrees = CMatrixUtil.getColumn(trees, numRounds, numClasses, classIndex);
		List<Float> classWeights = (weights != null ? CMatrixUtil.getColumn(weights, numRounds, numClasses, classIndex) : null);

		// Per-class raw score field, eg. "xgbValue(<label value>)"
		FieldName valueField = FieldName.create("xgbValue(" + categoricalLabel.getValue(classIndex) + ")");

		MiningModel classModel = createMiningModel(classTrees, classWeights, base_score, ntreeLimit, segmentSchema)
			.setOutput(ModelUtil.createPredictedOutput(valueField, OpType.CONTINUOUS, DataType.FLOAT));

		classModels.add(classModel);
	}

	return MiningModelUtil.createClassification(classModels, RegressionModel.NormalizationMethod.SOFTMAX, true, schema);
}
Example #2
Source File: KMeans.java From jpmml-sklearn with GNU Affero General Public License v3.0 | 5 votes |
/**
 * Encodes a fitted scikit-learn KMeans model as a PMML clustering model.
 *
 * Cluster centers are read row-by-row from the (clusters x features) center matrix;
 * when training labels are available, each cluster also records its assignment count.
 */
@Override
public ClusteringModel encodeModel(Schema schema){
	int[] shape = getClusterCentersShape();
	int numClusters = shape[0];
	int numFeatures = shape[1];

	List<? extends Number> centers = getClusterCenters();
	List<Integer> labels = getLabels();

	// Tally training-set assignments per cluster (empty when labels are unavailable)
	Multiset<Integer> assignmentCounts = HashMultiset.create();
	if(labels != null){
		assignmentCounts.addAll(labels);
	}

	List<Cluster> clusters = new ArrayList<>();

	for(int clusterIndex = 0; clusterIndex < numClusters; clusterIndex++){
		List<? extends Number> center = CMatrixUtil.getRow(centers, numClusters, numFeatures, clusterIndex);

		// Size is only meaningful when labels were tallied; otherwise leave it unset
		Integer size = (assignmentCounts.size() > 0 ? assignmentCounts.count(clusterIndex) : null);

		Cluster cluster = new Cluster(PMMLUtil.createRealArray(center))
			.setId(String.valueOf(clusterIndex))
			.setSize(size);

		clusters.add(cluster);
	}

	// KMeans distance: squared Euclidean over absolute coordinate differences
	ComparisonMeasure comparisonMeasure = new ComparisonMeasure(ComparisonMeasure.Kind.DISTANCE, new SquaredEuclidean())
		.setCompareFunction(CompareFunction.ABS_DIFF);

	ClusteringModel clusteringModel = new ClusteringModel(MiningFunction.CLUSTERING, ClusteringModel.ModelClass.CENTER_BASED, numClusters, ModelUtil.createMiningSchema(schema.getLabel()), comparisonMeasure, ClusteringModelUtil.createClusteringFields(schema.getFeatures()), clusters)
		.setOutput(ClusteringModelUtil.createOutput(FieldName.create("Cluster"), DataType.DOUBLE, clusters));

	return clusteringModel;
}
Example #3
Source File: MultilayerPerceptronUtil.java From jpmml-sklearn with GNU Affero General Public License v3.0 | 4 votes |
/**
 * Converts a scikit-learn MLP (weight matrices plus intercept vectors, one pair per
 * layer) into a PMML NeuralNetwork.
 *
 * Each weight matrix is (rows x columns) where column j holds the incoming weights of
 * neuron j in the layer being built. The final layer gets an identity activation; for
 * classification, a binary target is handled via a logistic output transformation and a
 * multi-class target via softmax normalization.
 */
static public NeuralNetwork encodeNeuralNetwork(MiningFunction miningFunction, String activation, List<? extends HasArray> coefs, List<? extends HasArray> intercepts, Schema schema){
	NeuralNetwork.ActivationFunction activationFunction = parseActivationFunction(activation);

	// One intercept vector per weight matrix
	ClassDictUtil.checkSize(coefs, intercepts);

	Label label = schema.getLabel();
	List<? extends Feature> features = schema.getFeatures();

	NeuralInputs neuralInputs = NeuralNetworkUtil.createNeuralInputs(features, DataType.DOUBLE);

	// "entities" always holds the neurons (or inputs) feeding the layer under construction
	List<? extends NeuralEntity> entities = neuralInputs.getNeuralInputs();

	List<NeuralLayer> neuralLayers = new ArrayList<>();

	for(int layer = 0; layer < coefs.size(); layer++){
		HasArray coef = coefs.get(layer);
		HasArray intercept = intercepts.get(layer);

		int[] shape = coef.getArrayShape();

		int rows = shape[0];
		int columns = shape[1];

		NeuralLayer neuralLayer = new NeuralLayer();

		List<?> coefMatrix = coef.getArrayContent();
		List<?> interceptVector = intercept.getArrayContent();

		for(int column = 0; column < columns; column++){
			// Column j of the weight matrix = incoming weights of neuron j
			List<? extends Number> weights = (List)CMatrixUtil.getColumn(coefMatrix, rows, columns, column);
			Number bias = (Number)interceptVector.get(column);

			// Neuron ids are 1-based: "<layer>/<neuron>"
			Neuron neuron = NeuralNetworkUtil.createNeuron(entities, weights, bias)
				.setId(String.valueOf(layer + 1) + "/" + String.valueOf(column + 1));

			neuralLayer.addNeurons(neuron);
		}

		neuralLayers.add(neuralLayer);

		entities = neuralLayer.getNeurons();

		// Output layer: override the hidden-layer activation and attach the
		// classification-specific output transformation
		if(layer == (coefs.size() - 1)){
			neuralLayer.setActivationFunction(NeuralNetwork.ActivationFunction.IDENTITY);

			switch(miningFunction){
				case REGRESSION:
					break;
				case CLASSIFICATION:
					CategoricalLabel categoricalLabel = (CategoricalLabel)label;

					// Binary classification
					if(categoricalLabel.size() == 2){
						// Appends logistic-transformation layers; the last one becomes
						// the effective output layer
						List<NeuralLayer> transformationNeuralLayers = NeuralNetworkUtil.createBinaryLogisticTransformation(Iterables.getOnlyElement(entities));

						neuralLayers.addAll(transformationNeuralLayers);

						neuralLayer = Iterables.getLast(transformationNeuralLayers);

						entities = neuralLayer.getNeurons();
					} else

					// Multi-class classification
					if(categoricalLabel.size() > 2){
						neuralLayer.setNormalizationMethod(NeuralNetwork.NormalizationMethod.SOFTMAX);
					} else

					{
						throw new IllegalArgumentException();
					}
					break;
				default:
					break;
			}
		}
	}

	NeuralOutputs neuralOutputs = null;

	switch(miningFunction){
		case REGRESSION:
			neuralOutputs = NeuralNetworkUtil.createRegressionNeuralOutputs(entities, (ContinuousLabel)label);
			break;
		case CLASSIFICATION:
			neuralOutputs = NeuralNetworkUtil.createClassificationNeuralOutputs(entities, (CategoricalLabel)label);
			break;
		default:
			break;
	}

	NeuralNetwork neuralNetwork = new NeuralNetwork(miningFunction, activationFunction, ModelUtil.createMiningSchema(label), neuralInputs, neuralLayers)
		.setNeuralOutputs(neuralOutputs);

	return neuralNetwork;
}
Example #4
Source File: LinearDiscriminantAnalysis.java From jpmml-sklearn with GNU Affero General Public License v3.0 | 4 votes |
private Model encodeMultinomialModel(Schema schema){ String sklearnVersion = getSkLearnVersion(); int[] shape = getCoefShape(); int numberOfClasses = shape[0]; int numberOfFeatures = shape[1]; List<? extends Number> coef = getCoef(); List<? extends Number> intercept = getIntercept(); CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel(); List<? extends Feature> features = schema.getFeatures(); // See https://github.com/scikit-learn/scikit-learn/issues/6848 boolean corrected = (sklearnVersion != null && SkLearnUtil.compareVersion(sklearnVersion, "0.21") >= 0); if(!corrected){ return super.encodeModel(schema); } // End if if(numberOfClasses >= 3){ SchemaUtil.checkSize(numberOfClasses, categoricalLabel); Schema segmentSchema = (schema.toAnonymousRegressorSchema(DataType.DOUBLE)).toEmptySchema(); List<RegressionModel> regressionModels = new ArrayList<>(); for(int i = 0, rows = categoricalLabel.size(); i < rows; i++){ RegressionModel regressionModel = RegressionModelUtil.createRegression(features, CMatrixUtil.getRow(coef, numberOfClasses, numberOfFeatures, i), intercept.get(i), RegressionModel.NormalizationMethod.NONE, segmentSchema) .setOutput(ModelUtil.createPredictedOutput(FieldName.create("decisionFunction(" + categoricalLabel.getValue(i) + ")"), OpType.CONTINUOUS, DataType.DOUBLE)); regressionModels.add(regressionModel); } return MiningModelUtil.createClassification(regressionModels, RegressionModel.NormalizationMethod.SOFTMAX, true, schema); } else { throw new IllegalArgumentException(); } }
Example #5
Source File: GBMConverter.java From jpmml-r with GNU Affero General Public License v3.0 | 4 votes |
/**
 * Encodes a multinomial GBM as a softmax ensemble of per-class tree sequences.
 *
 * The flat tree list interleaves classes; CMatrixUtil.getColumn() slices out the
 * boosting sequence that belongs to each class.
 */
private MiningModel encodeMultinomialClassification(List<TreeModel> treeModels, Double initF, Schema schema){
	CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

	Schema segmentSchema = schema.toAnonymousRegressorSchema(DataType.DOUBLE);

	int numClasses = categoricalLabel.size();
	int treesPerClass = (treeModels.size() / numClasses);

	List<Model> classModels = new ArrayList<>();

	for(int classIndex = 0; classIndex < numClasses; classIndex++){
		List<TreeModel> classTrees = CMatrixUtil.getColumn(treeModels, treesPerClass, numClasses, classIndex);

		// Per-class raw score field, eg. "gbmValue(<label value>)"
		FieldName valueField = FieldName.create("gbmValue(" + categoricalLabel.getValue(classIndex) + ")");

		MiningModel classModel = createMiningModel(classTrees, initF, segmentSchema)
			.setOutput(ModelUtil.createPredictedOutput(valueField, OpType.CONTINUOUS, DataType.DOUBLE));

		classModels.add(classModel);
	}

	return MiningModelUtil.createClassification(classModels, RegressionModel.NormalizationMethod.SOFTMAX, true, schema);
}
Example #6
Source File: GaussianNB.java From jpmml-sklearn with GNU Affero General Public License v3.0 | 3 votes |
/**
 * Encodes a fitted GaussianNB model as a PMML naive Bayes model.
 *
 * theta and sigma hold per-class feature means and variances; the getColumn() calls
 * below read them as (classes x features) matrices. Each feature contributes one
 * BayesInput with per-class Gaussian statistics; class priors come from the
 * training-set class counts.
 */
@Override
public NaiveBayesModel encodeModel(Schema schema){
	int[] shape = getThetaShape();

	int numberOfClasses = shape[0];
	int numberOfFeatures = shape[1];

	List<? extends Number> theta = getTheta();
	List<? extends Number> sigma = getSigma();

	CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

	BayesInputs bayesInputs = new BayesInputs();

	for(int featureIndex = 0; featureIndex < numberOfFeatures; featureIndex++){
		Feature feature = schema.getFeature(featureIndex);

		// Column i = the statistics of feature i across all classes
		List<? extends Number> featureMeans = CMatrixUtil.getColumn(theta, numberOfClasses, numberOfFeatures, featureIndex);
		List<? extends Number> featureVariances = CMatrixUtil.getColumn(sigma, numberOfClasses, numberOfFeatures, featureIndex);

		ContinuousFeature continuousFeature = feature.toContinuousFeature();

		BayesInput bayesInput = new BayesInput(continuousFeature.getName(), encodeTargetValueStats(categoricalLabel.getValues(), featureMeans, featureVariances), null);

		bayesInputs.addBayesInputs(bayesInput);
	}

	List<Integer> classCount = getClassCount();

	BayesOutput bayesOutput = new BayesOutput(categoricalLabel.getName(), null)
		.setTargetValueCounts(encodeTargetValueCounts(categoricalLabel.getValues(), classCount));

	// Threshold 0d: no smoothing of zero likelihoods
	NaiveBayesModel naiveBayesModel = new NaiveBayesModel(0d, MiningFunction.CLASSIFICATION, ModelUtil.createMiningSchema(categoricalLabel), bayesInputs, bayesOutput)
		.setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel));

	return naiveBayesModel;
}
Example #7
Source File: LinearClassifier.java From jpmml-sklearn with GNU Affero General Public License v3.0 | 2 votes |
/**
 * Encodes a scikit-learn linear classifier as a PMML model.
 *
 * A single coefficient row means a binary problem (one logistic regression over two
 * label values); three or more rows become one logistic regression per class, combined
 * with simplemax normalization. Exactly two rows is unsupported here.
 */
@Override
public Model encodeModel(Schema schema){
	int[] shape = getCoefShape();

	int numberOfClasses = shape[0];
	int numberOfFeatures = shape[1];

	boolean hasProbabilityDistribution = hasProbabilityDistribution();

	List<? extends Number> coef = getCoef();
	List<? extends Number> intercept = getIntercept();

	CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

	List<? extends Feature> features = schema.getFeatures();

	if(numberOfClasses == 1){
		// Binary case: one weight row, sigmoid link
		SchemaUtil.checkSize(2, categoricalLabel);

		List<? extends Number> weightRow = CMatrixUtil.getRow(coef, numberOfClasses, numberOfFeatures, 0);

		return RegressionModelUtil.createBinaryLogisticClassification(features, weightRow, intercept.get(0), RegressionModel.NormalizationMethod.LOGIT, hasProbabilityDistribution, schema);
	} // End if

	if(numberOfClasses >= 3){
		// One-vs-rest: a logistic regression per class, combined by simplemax
		SchemaUtil.checkSize(numberOfClasses, categoricalLabel);

		Schema segmentSchema = (schema.toAnonymousRegressorSchema(DataType.DOUBLE)).toEmptySchema();

		List<RegressionModel> classModels = new ArrayList<>();

		int numLabels = categoricalLabel.size();

		for(int classIndex = 0; classIndex < numLabels; classIndex++){
			List<? extends Number> weightRow = CMatrixUtil.getRow(coef, numberOfClasses, numberOfFeatures, classIndex);

			FieldName valueField = FieldName.create("decisionFunction(" + categoricalLabel.getValue(classIndex) + ")");

			RegressionModel classModel = RegressionModelUtil.createRegression(features, weightRow, intercept.get(classIndex), RegressionModel.NormalizationMethod.LOGIT, segmentSchema)
				.setOutput(ModelUtil.createPredictedOutput(valueField, OpType.CONTINUOUS, DataType.DOUBLE));

			classModels.add(classModel);
		}

		return MiningModelUtil.createClassification(classModels, RegressionModel.NormalizationMethod.SIMPLEMAX, hasProbabilityDistribution, schema);
	}

	throw new IllegalArgumentException();
}
Example #8
Source File: PCA.java From jpmml-sklearn with GNU Affero General Public License v3.0 | 2 votes |
/**
 * Encodes the PCA projection as derived features.
 *
 * For each component i, emits a derived field "pca@<seq>[i]" computing
 * sum_j((x[j] - mean[j]) * component[i][j]), optionally divided by
 * sqrt(explained_variance[i]) when whitening is enabled. Subtraction and
 * multiplication terms are skipped when the mean is zero / the loading is one,
 * keeping the generated expressions minimal.
 */
@Override
public List<Feature> encodeFeatures(List<Feature> features, SkLearnEncoder encoder){
	int[] shape = getComponentsShape();

	int numberOfComponents = shape[0];
	int numberOfFeatures = shape[1];

	List<? extends Number> components = getComponents();
	List<? extends Number> mean = getMean();

	ClassDictUtil.checkSize(numberOfFeatures, features, mean);

	Boolean whiten = getWhiten();

	// Explained variances are only needed (and only fetched) when whitening
	List<? extends Number> explainedVariance = (whiten ? getExplainedVariance() : null);

	ClassDictUtil.checkSize(numberOfComponents, explainedVariance);

	// Unique prefix for this transformer's derived-field names
	String id = "pca@" + String.valueOf(PCA.SEQUENCE.getAndIncrement());

	List<Feature> result = new ArrayList<>();

	for(int i = 0; i < numberOfComponents; i++){
		// Row i of the (components x features) loading matrix
		List<? extends Number> component = CMatrixUtil.getRow(components, numberOfComponents, numberOfFeatures, i);

		Apply apply = PMMLUtil.createApply(PMMLFunctions.SUM);

		for(int j = 0; j < numberOfFeatures; j++){
			Feature feature = features.get(j);

			Number meanValue = mean.get(j);
			Number componentValue = component.get(j);

			// Zero mean and unit loading: the term reduces to the raw field reference
			if(ValueUtil.isZero(meanValue) && ValueUtil.isOne(componentValue)){
				apply.addExpressions(feature.ref());

				continue;
			}

			ContinuousFeature continuousFeature = feature.toContinuousFeature();

			// "($name[i] - mean[i]) * component[i]"
			Expression expression = continuousFeature.ref();

			if(!ValueUtil.isZero(meanValue)){
				expression = PMMLUtil.createApply(PMMLFunctions.SUBTRACT, expression, PMMLUtil.createConstant(meanValue));
			} // End if

			if(!ValueUtil.isOne(componentValue)){
				expression = PMMLUtil.createApply(PMMLFunctions.MULTIPLY, expression, PMMLUtil.createConstant(componentValue));
			}

			apply.addExpressions(expression);
		}

		// Whitening: scale the projection by 1 / sqrt(explained variance)
		if(whiten){
			Number explainedVarianceValue = explainedVariance.get(i);

			if(!ValueUtil.isOne(explainedVarianceValue)){
				apply = PMMLUtil.createApply(PMMLFunctions.DIVIDE, apply, PMMLUtil.createConstant(Math.sqrt(ValueUtil.asDouble(explainedVarianceValue))));
			}
		}

		DerivedField derivedField = encoder.createDerivedField(FieldName.create(id + "[" + String.valueOf(i) + "]"), apply);

		result.add(new ContinuousFeature(encoder, derivedField));
	}

	return result;
}
Example #9
Source File: TruncatedSVD.java From jpmml-sklearn with GNU Affero General Public License v3.0 | 2 votes |
@Override public List<Feature> encodeFeatures(List<Feature> features, SkLearnEncoder encoder){ int[] shape = getComponentsShape(); int numberOfComponents = shape[0]; int numberOfFeatures = shape[1]; List<? extends Number> components = getComponents(); ClassDictUtil.checkSize(numberOfFeatures, features); String id = "svd@" + String.valueOf(TruncatedSVD.SEQUENCE.getAndIncrement()); List<Feature> result = new ArrayList<>(); for(int i = 0; i < numberOfComponents; i++){ List<? extends Number> component = CMatrixUtil.getRow(components, numberOfComponents, numberOfFeatures, i); Apply apply = PMMLUtil.createApply(PMMLFunctions.SUM); for(int j = 0; j < numberOfFeatures; j++){ Feature feature = features.get(j); Number componentValue = component.get(j); if(ValueUtil.isOne(componentValue)){ apply.addExpressions(feature.ref()); continue; } ContinuousFeature continuousFeature = feature.toContinuousFeature(); // "$name[i] * component[i]" Expression expression = PMMLUtil.createApply(PMMLFunctions.MULTIPLY, continuousFeature.ref(), PMMLUtil.createConstant(componentValue)); apply.addExpressions(expression); } DerivedField derivedField = encoder.createDerivedField(FieldName.create(id + "[" + String.valueOf(i) + "]"), apply); result.add(new ContinuousFeature(encoder, derivedField)); } return result; }