org.dmg.pmml.PMML Scala Examples

The following examples show how to use org.dmg.pmml.PMML in Scala. Each example notes the project and license it was taken from, so you can follow up with the original source file.
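Before the project examples, here is a minimal standalone sketch of the class in use: it builds a PMML document containing only a header and marshals it to XML. It assumes the setter-style JPMML-Model API seen in most examples below and that org.jpmml.model.JAXBUtil is on the classpath; the object name, application name, and printing are illustrative.

import java.io.StringWriter
import javax.xml.transform.stream.StreamResult

import org.dmg.pmml.{Application, Header, PMML}
import org.jpmml.model.JAXBUtil

object PmmlQuickStart {
  def main(args: Array[String]): Unit = {
    // Build a PMML 4.2 document containing only a Header (a complete document would
    // also carry a DataDictionary and a model element).
    val header = new Header().setApplication(new Application("pmml-quickstart"))
    val pmml = new PMML("4.2", header, null)

    // Marshal the document to an XML string via JAXB.
    val writer = new StringWriter()
    JAXBUtil.marshalPMML(pmml, new StreamResult(writer))
    println(writer.toString)
  }
}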
Example 1
Source File: PMMLModelExport.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.mllib.pmml.export

import java.text.SimpleDateFormat
import java.util.Date

import scala.beans.BeanProperty

import org.dmg.pmml.{Application, Header, PMML, Timestamp}

private[mllib] trait PMMLModelExport {

  
  @BeanProperty
  val pmml: PMML = {
    val version = getClass.getPackage.getImplementationVersion
    val app = new Application("Apache Spark MLlib").setVersion(version)
    val timestamp = new Timestamp()
      .addContent(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss").format(new Date()))
    val header = new Header()
      .setApplication(app)
      .setTimestamp(timestamp)
    new PMML("4.2", header, null)
  }
} 
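A concrete exporter would mix this trait in and then attach a DataDictionary and a model element to the pmml value it builds. A minimal sketch of that pattern, assuming the same setter-style JPMML-Model API; the DummyModelExport class and the field name are illustrative, not part of the project above.

package org.apache.spark.mllib.pmml.export

import org.dmg.pmml.{DataDictionary, DataField, DataType, FieldName, OpType}

// Hypothetical exporter: attaches a one-field DataDictionary to the document built by the trait.
private[mllib] class DummyModelExport extends PMMLModelExport {
  private val field = new DataField(FieldName.create("field_0"), OpType.CONTINUOUS, DataType.DOUBLE)
  pmml.setDataDictionary(new DataDictionary().addDataFields(field))
}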
Example 2
Source File: PmmlEvaluatorKit.scala    From flink-jpmml   with GNU Affero General Public License v3.0
package io.radicalbit.flink.pmml.scala.utils

import io.radicalbit.flink.pmml.scala.api.Evaluator
import org.apache.flink.ml.math.{DenseVector, SparseVector, Vector}
import org.dmg.pmml.{FieldName, PMML}
import org.jpmml.evaluator.{FieldValueUtil, ModelEvaluatorFactory}

trait PmmlEvaluatorKit {

  final protected def buildEvaluator(pmml: PMML): Evaluator =
    Evaluator(ModelEvaluatorFactory.newInstance.newModelEvaluator(pmml))

  final protected def buildExpectedInputMap(in: Vector, keys: Seq[String]) = {
    val data: Seq[Option[Double]] = in match {
      case dv: DenseVector => dv.data.map(Option(_))
      case sv: SparseVector =>
        keys.indices.map(index => if (sv.indices.contains(index)) Some(sv(index)) else None)
    }

    keys.zip(data).collect { case (k, Some(v)) => k -> v }.toMap
  }

  final protected def buildExpectedPreparedMap(in: Map[String, Any], keys: Seq[String], replaceValue: Option[Double]) =
    keys.map {
      case k if in.contains(k) => new FieldName(k) -> FieldValueUtil.create(null, null, in(k))
      case emptyKey => new FieldName(emptyKey) -> FieldValueUtil.create(null, null, replaceValue.orNull)
    }.toMap

} 
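For context, a hypothetical spec mixing this kit with the PmmlLoaderKit shown in the next example; it assumes the Flink ML DenseVector companion's varargs constructor, and the class name and field names are illustrative.

import io.radicalbit.flink.pmml.scala.utils.{PmmlEvaluatorKit, PmmlLoaderKit}
import org.apache.flink.ml.math.DenseVector

// Hypothetical spec: load a bundled PMML resource, wrap it in an Evaluator, and prepare an input map.
class EvaluatorKitSpec extends PmmlEvaluatorKit with PmmlLoaderKit {
  private val pmml = getPMMLResource(Source.KmeansPmml)
  private val evaluator = buildEvaluator(pmml)

  // Keys follow the order of the model's input fields; the names here are illustrative.
  private val inputMap = buildExpectedInputMap(DenseVector(1.0, 2.0), Seq("field_0", "field_1"))
}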
Example 3
Source File: PmmlLoaderKit.scala    From flink-jpmml   with GNU Affero General Public License v3.0
package io.radicalbit.flink.pmml.scala.utils

import org.dmg.pmml.PMML
import org.jpmml.model.{ImportFilter, JAXBUtil}
import org.xml.sax.InputSource

trait PmmlLoaderKit {

  protected case object Source {
    val KmeansPmml = "/kmeans.xml"
    val KmeansPmml41 = "/kmeans41.xml"
    val KmeansPmml40 = "/kmeans40.xml"
    val KmeansPmml42 = "/kmeans42.xml"
    val KmeansPmml32 = "/kmeans41.xml"

    val KmeansPmmlEmpty = "/kmeans_empty.xml"
    val KmeansPmmlNoOut = "/kmeans_nooutput.xml"
    val KmeansPmmlStringFields = "/kmeans_stringfields.xml"
    val KmeansPmmlNoOutNoTrg = "/kmeans_nooutput_notarget.xml"
    val NotExistingPath: String = "/not/existing/" + scala.util.Random.nextString(4)
  }

  final protected def getPMMLSource(path: String): String =
    getClass.getResource(path).getPath

  final protected def getPMMLResource(path: String): PMML = {
    val source = scala.io.Source.fromURL(getClass.getResource(path)).reader()
    JAXBUtil.unmarshalPMML(ImportFilter.apply(new InputSource(source)))
  }

} 
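A small hypothetical check built on this kit, assuming the bundled k-means resource declares a Header; the class and method names are illustrative.

import io.radicalbit.flink.pmml.scala.utils.PmmlLoaderKit

// Hypothetical check: read the bundled k-means model and expose its header application name.
class LoaderKitSpec extends PmmlLoaderKit {
  def headerApplicationName: Option[String] = {
    val pmml = getPMMLResource(Source.KmeansPmml)
    Option(pmml.getHeader).flatMap(h => Option(h.getApplication)).map(_.getName)
  }
}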
Example 4
Source File: PMMLModelExport.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.mllib.pmml.export

import java.text.SimpleDateFormat
import java.util.{Date, Locale}

import scala.beans.BeanProperty

import org.dmg.pmml.{Application, Header, PMML, Timestamp}

private[mllib] trait PMMLModelExport {

  
  @BeanProperty
  val pmml: PMML = {
    val version = getClass.getPackage.getImplementationVersion
    val app = new Application("Apache Spark MLlib").setVersion(version)
    val timestamp = new Timestamp()
      .addContent(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.US).format(new Date()))
    val header = new Header()
      .setApplication(app)
      .setTimestamp(timestamp)
    new PMML("4.2", header, null)
  }
} 
Example 5
Source File: PMMLModelExport.scala    From multi-tenancy-spark   with Apache License 2.0
package org.apache.spark.mllib.pmml.export

import java.text.SimpleDateFormat
import java.util.{Date, Locale}

import scala.beans.BeanProperty

import org.dmg.pmml.{Application, Header, PMML, Timestamp}

private[mllib] trait PMMLModelExport {

  
  @BeanProperty
  val pmml: PMML = {
    val version = getClass.getPackage.getImplementationVersion
    val app = new Application("Apache Spark MLlib").setVersion(version)
    val timestamp = new Timestamp()
      .addContent(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.US).format(new Date()))
    val header = new Header()
      .setApplication(app)
      .setTimestamp(timestamp)
    new PMML("4.2", header, null)
  }
} 
Example 6
Source File: PMMLModelExport.scala    From iolap   with Apache License 2.0
package org.apache.spark.mllib.pmml.export

import java.text.SimpleDateFormat
import java.util.Date

import scala.beans.BeanProperty

import org.dmg.pmml.{Application, Header, PMML, Timestamp}

private[mllib] trait PMMLModelExport {

  
  @BeanProperty
  val pmml: PMML = new PMML

  setHeader(pmml)

  private def setHeader(pmml: PMML): Unit = {
    val version = getClass.getPackage.getImplementationVersion
    val app = new Application().withName("Apache Spark MLlib").withVersion(version)
    val timestamp = new Timestamp()
      .withContent(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss").format(new Date()))
    val header = new Header()
      .withApplication(app)
      .withTimestamp(timestamp)
    pmml.setHeader(header)
  }
} 
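Examples 6, 7 and 9 target the older builder-style JPMML-Model API (withName, withContent, withApplication) rather than the setter-style API used above. Independently of the API generation, @BeanProperty also generates a Java-style getPmml() accessor on these traits. A minimal sketch of reading the exported header through it; the demo object and the anonymous trait instance are illustrative assumptions.

package org.apache.spark.mllib.pmml.export

// Illustrative accessor demo: @BeanProperty generates a Java-style getPmml() getter on the trait.
object PmmlAccessorDemo {
  // Anonymous instance purely for illustration; real exporters are concrete subclasses.
  private val export = new PMMLModelExport {}

  // Reads the application name set by the trait's header ("Apache Spark MLlib").
  def applicationName: String = export.getPmml.getHeader.getApplication.getName
}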
Example 7
Source File: PMMLModelExport.scala    From spark1.52   with Apache License 2.0
package org.apache.spark.mllib.pmml.export

import java.text.SimpleDateFormat
import java.util.Date

import scala.beans.BeanProperty

import org.dmg.pmml.{Application, Header, PMML, Timestamp}

private[mllib] trait PMMLModelExport {

  
  @BeanProperty
  val pmml: PMML = new PMML

  setHeader(pmml)

  private def setHeader(pmml: PMML): Unit = {
    val version = getClass.getPackage.getImplementationVersion
    val app = new Application().withName("Apache Spark MLlib").withVersion(version)
    val timestamp = new Timestamp()
      .withContent(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss").format(new Date()))
    val header = new Header()
      .withApplication(app)
      .withTimestamp(timestamp)
    pmml.setHeader(header)
  }
} 
Example 8
Source File: PMMLModelExport.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.mllib.pmml.export

import java.text.SimpleDateFormat
import java.util.{Date, Locale}

import scala.beans.BeanProperty

import org.dmg.pmml.{Application, Header, PMML, Timestamp}

private[mllib] trait PMMLModelExport {

  
  @BeanProperty
  val pmml: PMML = {
    val version = getClass.getPackage.getImplementationVersion
    val app = new Application("Apache Spark MLlib").setVersion(version)
    val timestamp = new Timestamp()
      .addContent(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.US).format(new Date()))
    val header = new Header()
      .setApplication(app)
      .setTimestamp(timestamp)
    new PMML("4.2", header, null)
  }
} 
Example 9
Source File: PMMLModelExport.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.mllib.pmml.export

import java.text.SimpleDateFormat
import java.util.Date

import scala.beans.BeanProperty

import org.dmg.pmml.{Application, Header, PMML, Timestamp}

private[mllib] trait PMMLModelExport {

  
  @BeanProperty
  val pmml: PMML = new PMML

  pmml.setVersion("4.2")
  setHeader(pmml)

  private def setHeader(pmml: PMML): Unit = {
    val version = getClass.getPackage.getImplementationVersion
    val app = new Application().withName("Apache Spark MLlib").withVersion(version)
    val timestamp = new Timestamp()
      .withContent(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss").format(new Date()))
    val header = new Header()
      .withApplication(app)
      .withTimestamp(timestamp)
    pmml.setHeader(header)
  }
} 
Example 10
Source File: PMMLReadWriteTest.scala    From sona   with Apache License 2.0
package com.tencent.angel.sona.ml.util

import java.io.{File, IOException}

import org.dmg.pmml.PMML
import org.scalatest.Suite
import org.apache.spark.SparkContext
import com.tencent.angel.sona.ml.param.Params


trait PMMLReadWriteTest extends TempDirectory { self: Suite =>
  /**
   * Test PMML export. Requires that the exported model is small enough to be loaded locally.
   * Checks that the model can be exported and the result is valid PMML, but does not check
   * the specific contents of the model.
   */
  def testPMMLWrite[T <: Params with GeneralMLWritable](sc: SparkContext, instance: T,
    checkModelData: PMML => Unit): Unit = {
    val uid = instance.uid
    val subdirName = Identifiable.randomUID("pmml-")

    val subdir = new File(tempDir, subdirName)
    val path = new File(subdir, uid).getPath

    instance.write.format("pmml").save(path)
    intercept[IOException] {
      instance.write.format("pmml").save(path)
    }
    instance.write.format("pmml").overwrite().save(path)
    val pmmlStr = sc.textFile(path).collect.mkString("\n")
    val pmmlModel = PMMLUtils.loadFromString(pmmlStr)
    assert(pmmlModel.getHeader.getApplication.getName.startsWith("Apache Spark"))
    checkModelData(pmmlModel)
  }
} 
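A minimal check function one might pass as checkModelData, assuming only that the exported document carries a Header; the object name and assertions are illustrative.

import org.dmg.pmml.PMML

object PmmlChecks {
  // Illustrative callback for testPMMLWrite: verifies only the header, not the model contents.
  val checkHeaderOnly: PMML => Unit = { pmml =>
    require(pmml.getHeader != null, "exported PMML must carry a Header")
    require(pmml.getHeader.getApplication.getName.startsWith("Apache Spark"))
  }
}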
Example 11
Source File: PMMLModel.scala    From model-serving-tutorial   with Apache License 2.0
package com.lightbend.modelserving.model.PMML

import java.io._
import java.util

import com.lightbend.model.modeldescriptor.ModelDescriptor
import com.lightbend.modelserving.model.{Model, ModelFactory}
import org.dmg.pmml.{FieldName, PMML}
import org.jpmml.evaluator.visitors._
import org.jpmml.evaluator._
import org.jpmml.model.PMMLUtil

import scala.collection._


// NOTE: the enclosing object and its optimizer list were elided in this excerpt.
// The declarations below are assumptions added so the snippet compiles; the original
// file defines a fuller list of JPMML visitor-based optimizers from org.jpmml.evaluator.visitors.
object PMMLModel {

  private val optimizers = Seq(new ExpressionOptimizer, new PredicateOptimizer)

  def optimize(pmml: PMML): Unit = this.synchronized {
    optimizers.foreach(opt =>
      try {
        opt.applyTo(pmml)
      } catch {
        case t: Throwable => {
          println(s"Error optimizing model for optimizer $opt")
          t.printStackTrace()
        }
      }
    )
  }
}
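A sketch of driving the optimizer above: it assumes JPMML-Model's PMMLUtil.unmarshal (already imported in the example), an illustrative classpath resource name, and the enclosing object name restored in the excerpt.

import java.io.InputStream

import com.lightbend.modelserving.model.PMML.PMMLModel
import org.dmg.pmml.PMML
import org.jpmml.model.PMMLUtil

object OptimizeDemo {
  // Load a PMML document from the classpath (resource name is illustrative) and optimize it in place.
  def loadAndOptimize(resource: String = "/kmeans.xml"): PMML = {
    val is: InputStream = getClass.getResourceAsStream(resource)
    require(is != null, s"resource $resource not found on the classpath")
    try {
      val pmml = PMMLUtil.unmarshal(is)
      PMMLModel.optimize(pmml) // enclosing object name is assumed, see the note in the excerpt above
      pmml
    } finally is.close()
  }
}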