com.fasterxml.jackson.annotation.JsonIgnore Scala Examples
The following examples show how to use com.fasterxml.jackson.annotation.JsonIgnore in Scala. Each example is drawn from an open-source project; the header above each snippet names the source file, the project, and its license.
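As a quick orientation before the examples: @JsonIgnore excludes a property from Jackson serialization and deserialization. A minimal sketch, assuming jackson-module-scala is on the classpath (its DefaultScalaModule teaches Jackson about Scala case classes); the Account class is illustrative, not from any example below:

import com.fasterxml.jackson.annotation.JsonIgnore
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule

// `password` never appears in the serialized JSON.
case class Account(name: String, @JsonIgnore password: String = "")

object JsonIgnoreDemo extends App {
  val mapper = new ObjectMapper().registerModule(DefaultScalaModule)
  // prints {"name":"alice"} -- the annotated field is dropped
  println(mapper.writeValueAsString(Account("alice", "s3cret")))
}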
Example 1
Source File: SQLAppStatusStore.scala From XSQL with Apache License 2.0
package org.apache.spark.sql.execution.ui

import java.lang.{Long => JLong}
import java.util.Date

import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer

import com.fasterxml.jackson.annotation.JsonIgnore
import com.fasterxml.jackson.databind.annotation.JsonDeserialize

import org.apache.spark.JobExecutionStatus
import org.apache.spark.status.KVUtils.KVIndexParam
import org.apache.spark.util.kvstore.{KVIndex, KVStore}

class SparkPlanGraphNodeWrapper(
    val node: SparkPlanGraphNode,
    val cluster: SparkPlanGraphClusterWrapper) {

  def toSparkPlanGraphNode(): SparkPlanGraphNode = {
    assert(node == null ^ cluster == null, "One and only one of node or cluster must be set.")
    if (node != null) node else cluster.toSparkPlanGraphCluster()
  }
}

case class SQLPlanMetric(
    name: String,
    accumulatorId: Long,
    metricType: String)
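One detail worth pausing on: ^ on Boolean is exclusive or, so the assert requires that exactly one of node and cluster is non-null; the wrapper holds either a plain node or a cluster, never both and never neither. A self-contained check of the operator:

object BooleanXorCheck extends App {
  // Boolean ^ is exclusive or: true exactly when the two operands differ.
  println(true ^ false)  // true  -- exactly one of node/cluster is null
  println(true ^ true)   // false -- both are null
  println(false ^ false) // false -- neither is null
}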
Example 2
Source File: ExecutorNumListener.scala From XSQL with Apache License 2.0
package org.apache.spark.monitor

import java.text.SimpleDateFormat
import java.util
import java.util.Date
import java.util.concurrent.atomic.AtomicBoolean

import com.fasterxml.jackson.annotation.JsonIgnore

import org.apache.spark.SparkContext
import org.apache.spark.internal.Logging
import org.apache.spark.scheduler.{SparkListener, SparkListenerExecutorAdded, SparkListenerExecutorRemoved}
import org.apache.spark.util.kvstore.KVIndex

class ExecutorNumListener extends SparkListener with Logging {

  lazy val kvstore = SparkContext.getActive.get.statusStore.store
  var initialized: AtomicBoolean = new AtomicBoolean(false)
  var lastPointTime: Long = new Date().getTime
  var recentEventTime: Long = new Date().getTime
  private val liveExecutors = new util.HashSet[String]()

  def initialize(): Unit = {
    SparkContext.getActive.flatMap(_.ui).foreach { ui =>
      ui.attachTab(new ExecutorNumTab(ui))
      ui.addStaticHandler("static", "/static/special")
    }
  }

  def maybeAddPoint(time: Long): Unit = {
    if (!initialized.get) {
      initialize()
      initialized.compareAndSet(false, true)
    }
    if (time - lastPointTime > 20 * 1000) {
      addPoint(recentEventTime)
      addPoint(time)
      lastPointTime = time
    }
    recentEventTime = time
  }

  def addPoint(time: Long): Unit = {
    val executorNum = liveExecutors.size
    kvstore.write(new ExecutorNumWrapper(new ExecutorNum(
      s"own ${executorNum} executors at ${new SimpleDateFormat("HH:mm:ss").format(new Date(time))}",
      IndexedSeq(time, executorNum))))
  }

  override def onExecutorAdded(event: SparkListenerExecutorAdded): Unit = {
    liveExecutors.add(event.executorId)
    maybeAddPoint(event.time)
  }

  override def onExecutorRemoved(event: SparkListenerExecutorRemoved): Unit = {
    liveExecutors.remove(event.executorId)
    maybeAddPoint(event.time)
  }
}

private[spark] class ExecutorNumWrapper(val point: ExecutorNum) {
  @JsonIgnore @KVIndex
  def id: Long = point.value(0)
}

private[spark] class ExecutorNum(val name: String, val value: IndexedSeq[Long])
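ExecutorNumWrapper at the end of this example shows the pairing the listener relies on: @KVIndex marks id as the KVStore index key, while @JsonIgnore keeps that derived value out of the JSON Jackson writes for the wrapper, since it can always be recomputed from point. A minimal sketch of the same pattern; the Sample and SampleWrapper names are illustrative, not part of the example above:

import com.fasterxml.jackson.annotation.JsonIgnore
import org.apache.spark.util.kvstore.KVIndex

// Illustrative payload: a named series of values.
class Sample(val name: String, val values: IndexedSeq[Long])

class SampleWrapper(val sample: Sample) {
  // Index key for KVStore lookups, excluded from the serialized JSON
  // because it is derived from `sample` and would be redundant on disk.
  @JsonIgnore @KVIndex
  def id: Long = sample.values(0)
}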
Example 3
Source File: SingletonMemorySink.scala From milan with Apache License 2.0
package com.amazon.milan.application.sinks

import java.time.{Duration, Instant}
import java.util.concurrent.{ConcurrentHashMap, ConcurrentLinkedQueue}
import java.util.function

import com.amazon.milan.Id
import com.amazon.milan.application.DataSink
import com.amazon.milan.typeutil.TypeDescriptor
import com.fasterxml.jackson.annotation.JsonIgnore
import com.fasterxml.jackson.databind.annotation.{JsonDeserialize, JsonSerialize}

import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.TimeoutException

object SingletonMemorySink {
  private val values = new ConcurrentHashMap[String, ArrayBuffer[MemorySinkRecord[_]]]()
  private val nextSeqNum = new mutable.HashMap[String, Int]()
  private val locks = new ConcurrentHashMap[String, Object]()

  private def makeCreateBufferFunction[T]: java.util.function.Function[String, ArrayBuffer[MemorySinkRecord[_]]] =
    new function.Function[String, ArrayBuffer[MemorySinkRecord[_]]] {
      override def apply(t: String): ArrayBuffer[MemorySinkRecord[_]] =
        new ArrayBuffer[MemorySinkRecord[T]]().asInstanceOf[ArrayBuffer[MemorySinkRecord[_]]]
    }

  private val createLocker = new java.util.function.Function[String, Object] {
    override def apply(t: String): AnyRef = new Object()
  }

  // NOTE: this excerpt is partial. The members below reference `this.sinkId`
  // and a type parameter T, so in the original file they belong to the
  // SingletonMemorySink[T] class (elided here, along with its getBuffer
  // helper), not to this companion object.

  @JsonIgnore
  def getRecordCount: Int = SingletonMemorySink.getBuffer(this.sinkId).size

  @JsonIgnore
  def getValues: List[T] = {
    SingletonMemorySink.getBuffer[T](this.sinkId).map(_.value).toList
  }

  @JsonIgnore
  def getRecords: List[MemorySinkRecord[T]] = {
    SingletonMemorySink.getBuffer[T](this.sinkId).toList
  }

  def waitForItems(itemCount: Int, timeout: Duration = null): Unit = {
    val endTime = if (timeout == null) Instant.MAX else Instant.now().plus(timeout)

    while (SingletonMemorySink.getBuffer(this.sinkId).size < itemCount) {
      if (Instant.now().isAfter(endTime)) {
        throw new TimeoutException()
      }
      Thread.sleep(1)
    }
  }

  override def equals(obj: Any): Boolean = {
    obj match {
      case o: SingletonMemorySink[_] => this.sinkId.equals(o.sinkId)
      case _ => false
    }
  }
}

class MemorySinkRecord[T](val seqNum: String, val createdTime: Instant, val value: T) extends Serializable
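waitForItems above is a plain poll-with-deadline loop. Here is the same pattern in isolation, as a sketch (the Awaiting object and awaitCondition are hypothetical names, not part of Milan); a null timeout means wait forever, matching the example:

import java.time.{Duration, Instant}
import scala.concurrent.TimeoutException

object Awaiting {
  // Re-check a condition every millisecond until it holds,
  // or fail once the deadline passes.
  def awaitCondition(condition: () => Boolean, timeout: Duration = null): Unit = {
    val endTime = if (timeout == null) Instant.MAX else Instant.now().plus(timeout)
    while (!condition()) {
      if (Instant.now().isAfter(endTime)) throw new TimeoutException()
      Thread.sleep(1)
    }
  }
}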
Example 4
Source File: Point.scala From magellan with Apache License 2.0
package magellan

import com.fasterxml.jackson.annotation.{JsonIgnore, JsonProperty}
import org.apache.spark.sql.types._
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._

// NOTE: this excerpt omits the `class Point ...` declaration that encloses
// the members below.

  override def transform(fn: (Point) => Point): Point = fn(this)

  def withinCircle(origin: Point, radius: Double): Boolean = {
    val sqrdL2Norm = Math.pow(origin.getX() - getX(), 2) + Math.pow(origin.getY() - getY(), 2)
    sqrdL2Norm <= Math.pow(radius, 2)
  }

  @JsonProperty
  override def getType(): Int = 1

  override def jsonValue: JValue =
    ("type" -> "udt") ~
      ("class" -> this.getClass.getName) ~
      ("pyClass" -> "magellan.types.PointUDT") ~
      ("x" -> x) ~
      ("y" -> y)

  @JsonProperty
  override def boundingBox = BoundingBox(x, y, x, y)

  @JsonIgnore
  override def isEmpty(): Boolean = true
}

object Point {

  def apply(x: Double, y: Double) = {
    val p = new Point()
    p.setX(x)
    p.setY(y)
    p
  }
}
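withinCircle compares the squared Euclidean distance against the squared radius, avoiding a square-root call. A quick usage sketch with the companion's apply (the demo object is illustrative): Point(3.0, 4.0) lies exactly 5 units from the origin.

import magellan.Point

object WithinCircleDemo extends App {
  val origin = Point(0.0, 0.0)
  val p = Point(3.0, 4.0) // Euclidean distance from the origin: sqrt(9 + 16) = 5
  println(p.withinCircle(origin, 5.0)) // true:  25.0 <= 25.0
  println(p.withinCircle(origin, 4.9)) // false: 25.0 >  24.01
}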
Example 5
Source File: CommonLog.scala From AppCrawler with Apache License 2.0
package com.testerhome.appcrawler

import java.io.OutputStreamWriter

import com.fasterxml.jackson.annotation.JsonIgnore
import org.apache.log4j._

trait CommonLog {
  BasicConfigurator.configure()
  Logger.getRootLogger.setLevel(Level.INFO)

  @JsonIgnore
  val layout = new PatternLayout("%d{yyyy-MM-dd HH:mm:ss} %p [%c{1}.%M.%L] %m%n")
  @JsonIgnore
  lazy val log = initLog()

  def initLog(): Logger = {
    val log = Logger.getLogger(this.getClass.getName)
    //val log = Logger.getRootLogger
    if (log.getAppender("console") == null) {
      val console = new ConsoleAppender()
      console.setName("console")
      console.setWriter(new OutputStreamWriter(System.out))
      console.setLayout(layout)
      log.addAppender(console)
    } else {
      log.info("console appender already exists")
    }
    log.trace(s"set ${this} log level to ${GA.logLevel}")
    log.setLevel(GA.logLevel)
    log.setAdditivity(false)
    log
  }
}
Example 6
Source File: AutoJobDesc.scala From comet-data-pipeline with Apache License 2.0
package com.ebiznext.comet.schema.model

import com.ebiznext.comet.config.{DatasetArea, Settings, StorageArea}
import com.fasterxml.jackson.annotation.JsonIgnore
import org.apache.hadoop.fs.Path

case class AutoJobDesc(
  name: String,
  tasks: List[AutoTaskDesc],
  area: Option[StorageArea] = None,
  format: Option[String],
  coalesce: Option[Boolean],
  udf: Option[String] = None,
  views: Option[Map[String, String]] = None
) {
  def getArea(): StorageArea = area.getOrElse(StorageArea.business)
}
Example 7
Source File: SQLAppStatusStore.scala From Spark-2.3.1 with Apache License 2.0
package org.apache.spark.sql.execution.ui

import java.lang.{Long => JLong}
import java.util.Date

import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer

import com.fasterxml.jackson.annotation.JsonIgnore
import com.fasterxml.jackson.databind.annotation.JsonDeserialize

import org.apache.spark.JobExecutionStatus
import org.apache.spark.status.KVUtils.KVIndexParam
import org.apache.spark.util.kvstore.{KVIndex, KVStore}

class SparkPlanGraphNodeWrapper(
    val node: SparkPlanGraphNode,
    val cluster: SparkPlanGraphClusterWrapper) {

  def toSparkPlanGraphNode(): SparkPlanGraphNode = {
    assert(node == null ^ cluster == null, "One and only one of node or cluster must be set.")
    if (node != null) node else cluster.toSparkPlanGraphCluster()
  }
}

case class SQLPlanMetric(
    name: String,
    accumulatorId: Long,
    metricType: String)
Example 8
Source File: VariablesExportItem.scala From asura with MIT License
package asura.core.es.model

import asura.common.util.StringUtils
import com.fasterxml.jackson.annotation.JsonIgnore

case class VariablesExportItem(
  srcPath: String,
  dstName: String,
  scope: String,
  description: String,
  enabled: Boolean = true,
  function: String = null,
  extra: VariablesItemExtraData = null,
) {

  @JsonIgnore
  def isValid(): Boolean = {
    !StringUtils.hasEmpty(srcPath, dstName, scope) && enabled
  }
}
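The @JsonIgnore on isValid() matters because of Jackson's bean conventions: a no-argument isValid() returning Boolean would otherwise be picked up as a getter for a property named valid and written into every serialized document. A minimal sketch of the effect, assuming jackson-module-scala's DefaultScalaModule; the Item class is illustrative:

import com.fasterxml.jackson.annotation.JsonIgnore
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule

case class Item(name: String) {
  @JsonIgnore
  def isValid(): Boolean = name.nonEmpty // derived, so not worth persisting
}

object ItemDemo extends App {
  val mapper = new ObjectMapper().registerModule(DefaultScalaModule)
  println(mapper.writeValueAsString(Item("a"))) // {"name":"a"} -- no "valid" field
}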
Example 9
Source File: JobReport.scala From asura with MIT License
package asura.core.es.model

import asura.common.util.StringUtils
import asura.core.es.EsConfig
import asura.core.job.JobExecDesc
import com.fasterxml.jackson.annotation.JsonIgnore
import com.sksamuel.elastic4s.requests.mappings._

case class JobReport(
  val scheduler: String,
  val group: String,
  val project: String,
  val jobId: String,
  val jobName: String,
  val `type`: String,
  val classAlias: String,
  var startAt: String = null,
  var endAt: String = null,
  var elapse: Long = 0L,
  var result: String = JobExecDesc.STATUS_SUCCESS,
  var errorMsg: String = StringUtils.EMPTY,
  val node: String = JobReport.hostname,
  val data: JobReportData = JobReportData(),
  var statis: JobReportDataStatistic = null,
  val summary: String = StringUtils.EMPTY,
  val description: String = StringUtils.EMPTY,
  var creator: String = null,
  var createdAt: String = null,
  var updatedAt: String = StringUtils.EMPTY,
) extends BaseIndex {

  @JsonIgnore
  def isSuccessful(): Boolean = {
    JobExecDesc.STATUS_SUCCESS == result
  }
}

object JobReport extends IndexSetting {

  val Index: String = s"${EsConfig.IndexPrefix}job-report"
  val mappings: MappingDefinition = MappingDefinition(
    BaseIndex.fieldDefinitions ++ Seq(
      KeywordField(name = FieldKeys.FIELD_SCHEDULER),
      KeywordField(name = FieldKeys.FIELD_GROUP),
      KeywordField(name = FieldKeys.FIELD_PROJECT),
      KeywordField(name = FieldKeys.FIELD_JOB_ID),
      TextField(name = FieldKeys.FIELD_JOB_NAME, copyTo = Seq(FieldKeys.FIELD__TEXT), analysis = EsConfig.IK_ANALYZER),
      KeywordField(name = FieldKeys.FIELD_TYPE),
      KeywordField(name = FieldKeys.FIELD_CLASS_ALIAS),
      BasicField(name = FieldKeys.FIELD_START_AT, `type` = "date", format = Some(EsConfig.DateFormat)),
      BasicField(name = FieldKeys.FIELD_END_AT, `type` = "date", format = Some(EsConfig.DateFormat)),
      BasicField(name = FieldKeys.FIELD_ELAPSE, `type` = "long"),
      KeywordField(name = FieldKeys.FIELD_RESULT),
      TextField(name = FieldKeys.FIELD_ERROR_MSG, analysis = EsConfig.IK_ANALYZER),
      KeywordField(name = FieldKeys.FIELD_NODE),
      ObjectField(name = FieldKeys.FIELD_STATIS, fields = Seq(
        BasicField(name = FieldKeys.FIELD_CASE_COUNT, `type` = "integer"),
        BasicField(name = FieldKeys.FIELD_CASE_OK, `type` = "integer"),
        BasicField(name = FieldKeys.FIELD_CASE_KO, `type` = "integer"),
        BasicField(name = FieldKeys.FIELD_SCENARIO_COUNT, `type` = "integer"),
        BasicField(name = FieldKeys.FIELD_SCENARIO_OK, `type` = "integer"),
        BasicField(name = FieldKeys.FIELD_SCENARIO_KO, `type` = "integer"),
        BasicField(name = FieldKeys.FIELD_SCENARIO_CASE_COUNT, `type` = "integer"),
        BasicField(name = FieldKeys.FIELD_SCENARIO_CASE_OK, `type` = "integer"),
        BasicField(name = FieldKeys.FIELD_SCENARIO_CASE_KO, `type` = "integer"),
        BasicField(name = FieldKeys.FIELD_SCENARIO_CASE_OO, `type` = "integer"),
        BasicField(name = FieldKeys.FIELD_Ok_RATE, `type` = "integer"),
        BasicField(name = FieldKeys.FIELD_ASSERTION_PASSED, `type` = "integer"),
        BasicField(name = FieldKeys.FIELD_ASSERTION_FAILED, `type` = "integer"),
      )),
      ObjectField(name = FieldKeys.FIELD_DATA, dynamic = Some("false")),
    )
  )

  val hostname = try {
    import scala.sys.process._
    "hostname".!!.trim
  } catch {
    case _: Throwable => "Unknown"
  }

  val TYPE_QUARTZ = "quartz"
  val TYPE_CI = "ci"
  val TYPE_TEST = "test"
  val TYPE_MANUAL = "manual"
}
Example 10
Source File: VariablesImportItem.scala From asura with MIT License
package asura.core.es.model

import asura.common.util.StringUtils
import com.fasterxml.jackson.annotation.JsonIgnore

case class VariablesImportItem(
  name: String,
  scope: String,
  value: Object,
  description: String,
  `type`: String = null,
  extra: VariablesItemExtraData = null,
  enabled: Boolean = true,
  exposed: Boolean = true,
  function: String = null,
) {

  @JsonIgnore
  def isValid(): Boolean = {
    !StringUtils.hasEmpty(name, scope) && null != value && enabled
  }
}

object VariablesImportItem {
  val TYPE_ENUM = "enum"
}
Example 11
Source File: ScenarioStep.scala From asura with MIT License
package asura.core.es.model

import asura.common.util.StringUtils
import asura.core.es.model.ScenarioStep.StepData
import com.fasterxml.jackson.annotation.JsonIgnore

case class ScenarioStep(
  id: String,
  `type`: String,
  stored: Boolean = false, // whether the step result needs to be stored in the context
  enabled: Boolean = true,
  data: StepData = null,
) {

  @JsonIgnore
  def isScenarioStep(): Boolean = {
    if (StringUtils.isNotEmpty(id)) {
      StringUtils.isEmpty(`type`) || ScenarioStep.TYPE_SCENARIO.equals(`type`)
    } else {
      false
    }
  }
}

object ScenarioStep {

  // use `case` for http requests, for backward compatibility
  val TYPE_HTTP = "case"
  val TYPE_SQL = "sql"
  val TYPE_DUBBO = "dubbo"
  val TYPE_SCENARIO = "scenario"
  val TYPE_JOB = "job"
  val TYPE_DELAY = "delay"
  val TYPE_JUMP = "jump"

  val TIME_UNIT_MILLI = "milli"
  val TIME_UNIT_SECOND = "second"
  val TIME_UNIT_MINUTE = "minute"

  case class StepData(
    delay: DelayCondition = null,
    jump: JumpConditions = null,
  )

  case class DelayCondition(value: Int, timeUnit: String)

  case class AssertJumpCondition(
    assert: Map[String, Any] = null,
    to: Int = 0,
  )

  case class JumpConditions(
    `type`: Int = 0, // 0: assert, 1: script
    conditions: Seq[AssertJumpCondition] = Nil,
    script: String = null, // a script that returns an integer
  )
}
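For a sense of how the model above is put together, here is a construction sketch using only the types shown in this example (the values and the demo object name are illustrative; how asura's engine executes the step is outside this snippet): a step that delays the scenario by 3 seconds.

import asura.core.es.model.ScenarioStep
import asura.core.es.model.ScenarioStep.{DelayCondition, StepData}

object DelayStepExample {
  // A TYPE_DELAY step pausing for 3 seconds; `id` is left null here purely
  // for illustration.
  val delayStep = ScenarioStep(
    id = null,
    `type` = ScenarioStep.TYPE_DELAY,
    data = StepData(delay = DelayCondition(3, ScenarioStep.TIME_UNIT_SECOND))
  )
}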
Example 12
Source File: Chirp.scala From activator-lagom-scala-chirper with Apache License 2.0
package sample.chirper.chirp.api

import java.time.Instant
import java.util.UUID

import com.fasterxml.jackson.annotation.JsonIgnore

case class Chirp @JsonIgnore() (userId: String, message: String, timestamp: Instant, uuid: String) {
  def this(userId: String, message: String) =
    this(userId, message, Chirp.defaultTimestamp, Chirp.defaultUUID)
}

object Chirp {

  implicit object ChirpOrdering extends Ordering[Chirp] {
    override def compare(x: Chirp, y: Chirp): Int = x.timestamp.compareTo(y.timestamp)
  }

  def apply(userId: String, message: String, timestamp: Option[Instant], uuid: Option[String]): Chirp =
    new Chirp(userId, message, timestamp.getOrElse(defaultTimestamp), uuid.getOrElse(defaultUUID))

  private def defaultTimestamp = Instant.now()

  private def defaultUUID = UUID.randomUUID().toString()
}
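Note the annotation placement in Chirp: in Scala, an annotation written between the class name and the parameter list attaches to the primary constructor itself, so this @JsonIgnore targets the constructor rather than any single field (Jackson's @JsonIgnore is declared with CONSTRUCTOR among its valid targets). A minimal syntax sketch with a hypothetical Event class:

import com.fasterxml.jackson.annotation.JsonIgnore

// The annotation between the class name and the parameter list targets the
// primary constructor, not a field.
case class Event @JsonIgnore() (name: String, payload: String)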
Example 13
Source File: TestParams.scala From activator-lagom-scala-chirper with Apache License 2.0
package sample.chirper.load.api

import com.fasterxml.jackson.annotation.JsonIgnore

case class TestParams @JsonIgnore() (
  users: Int,
  friends: Int,
  chirps: Int,
  clients: Int,
  parallelism: Int,
  userIdPrefix: Option[String]
) {
  def this() = this(1000, 10, 100000, 10, 10, Option.empty)
}