org.apache.spark.network.util.ByteUnit Scala Examples
The following examples show how to use org.apache.spark.network.util.ByteUnit.
Example 1
Source File: ConfigBuilder.scala From drizzle-spark with Apache License 2.0
package org.apache.spark.internal.config

import java.util.concurrent.TimeUnit

import org.apache.spark.network.util.{ByteUnit, JavaUtils}

private object ConfigHelpers {

  def toNumber[T](s: String, converter: String => T, key: String, configType: String): T = {
    try {
      converter(s)
    } catch {
      case _: NumberFormatException =>
        throw new IllegalArgumentException(s"$key should be $configType, but was $s")
    }
  }

  def toBoolean(s: String, key: String): Boolean = {
    try {
      s.toBoolean
    } catch {
      case _: IllegalArgumentException =>
        throw new IllegalArgumentException(s"$key should be boolean, but was $s")
    }
  }

  def stringToSeq[T](str: String, converter: String => T): Seq[T] = {
    str.split(",").map(_.trim()).filter(_.nonEmpty).map(converter)
  }

  def seqToString[T](v: Seq[T], stringConverter: T => String): String = {
    v.map(stringConverter).mkString(",")
  }

  def timeFromString(str: String, unit: TimeUnit): Long = JavaUtils.timeStringAs(str, unit)

  def timeToString(v: Long, unit: TimeUnit): String =
    TimeUnit.MILLISECONDS.convert(v, unit) + "ms"

  // Byte strings may carry a leading '-': strip it, parse the rest, and
  // reapply the sign, since JavaUtils.byteStringAs only accepts positive input.
  def byteFromString(str: String, unit: ByteUnit): Long = {
    val (input, multiplier) =
      if (str.length() > 0 && str.charAt(0) == '-') {
        (str.substring(1), -1)
      } else {
        (str, 1)
      }
    multiplier * JavaUtils.byteStringAs(input, unit)
  }

  def byteToString(v: Long, unit: ByteUnit): String =
    unit.convertTo(v, ByteUnit.BYTE) + "b"
}

// The original excerpt elides the TypedConfigBuilder class (defined in the same
// file) as well as ConfigBuilder's declaration and private fields; the enclosing
// case class is restored below so the remaining methods read in context.
private[spark] case class ConfigBuilder(key: String) {

  import ConfigHelpers._

  private[config] var _public = true
  private[config] var _doc = ""
  private[config] var _onCreate: Option[ConfigEntry[_] => Unit] = None

  def onCreate(callback: ConfigEntry[_] => Unit): ConfigBuilder = {
    _onCreate = Option(callback)
    this
  }

  def intConf: TypedConfigBuilder[Int] = {
    new TypedConfigBuilder(this, toNumber(_, _.toInt, key, "int"))
  }

  def longConf: TypedConfigBuilder[Long] = {
    new TypedConfigBuilder(this, toNumber(_, _.toLong, key, "long"))
  }

  def doubleConf: TypedConfigBuilder[Double] = {
    new TypedConfigBuilder(this, toNumber(_, _.toDouble, key, "double"))
  }

  def booleanConf: TypedConfigBuilder[Boolean] = {
    new TypedConfigBuilder(this, toBoolean(_, key))
  }

  def stringConf: TypedConfigBuilder[String] = {
    new TypedConfigBuilder(this, v => v)
  }

  def timeConf(unit: TimeUnit): TypedConfigBuilder[Long] = {
    new TypedConfigBuilder(this, timeFromString(_, unit), timeToString(_, unit))
  }

  def bytesConf(unit: ByteUnit): TypedConfigBuilder[Long] = {
    new TypedConfigBuilder(this, byteFromString(_, unit), byteToString(_, unit))
  }

  def fallbackConf[T](fallback: ConfigEntry[T]): ConfigEntry[T] = {
    new FallbackConfigEntry(key, _doc, _public, fallback)
  }
}
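The parsing and formatting helpers above bottom out in ByteUnit and JavaUtils. Below is a minimal, self-contained sketch of how those two real Spark classes behave; the demo object name and the sample values are illustrative assumptions, not part of the original file.

import org.apache.spark.network.util.{ByteUnit, JavaUtils}

// Hypothetical driver illustrating the ByteUnit/JavaUtils calls used above.
object ByteUnitDemo {
  def main(args: Array[String]): Unit = {
    // Suffixed byte strings use binary units: "1k" parsed into bytes is 1024.
    println(JavaUtils.byteStringAs("1k", ByteUnit.BYTE))   // 1024

    // A bare number is interpreted in the supplied unit: 10 means 10 MiB here.
    println(JavaUtils.byteStringAs("10", ByteUnit.MiB))    // 10

    // Direct unit conversions, mirroring java.util.concurrent.TimeUnit.
    println(ByteUnit.MiB.toBytes(10))                      // 10485760
    println(ByteUnit.MiB.convertTo(10, ByteUnit.KiB))      // 10240
  }
}

This is also why byteFromString handles the sign itself: JavaUtils.byteStringAs rejects negative input, so the helper strips a leading '-' before parsing and multiplies the result back.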
Example 2
Source File: config.scala From Spark-2.3.1 with Apache License 2.0
package org.apache.spark.deploy.history

import java.util.concurrent.TimeUnit

import org.apache.spark.internal.config.ConfigBuilder
import org.apache.spark.network.util.ByteUnit

private[spark] object config {

  val DEFAULT_LOG_DIR = "file:/tmp/spark-events"

  val EVENT_LOG_DIR = ConfigBuilder("spark.history.fs.logDirectory")
    .stringConf
    .createWithDefault(DEFAULT_LOG_DIR)

  val MAX_LOG_AGE_S = ConfigBuilder("spark.history.fs.cleaner.maxAge")
    .timeConf(TimeUnit.SECONDS)
    .createWithDefaultString("7d")

  val LOCAL_STORE_DIR = ConfigBuilder("spark.history.store.path")
    .doc("Local directory where to cache application history information. By default this is " +
      "not set, meaning all history information will be kept in memory.")
    .stringConf
    .createOptional

  val MAX_LOCAL_DISK_USAGE = ConfigBuilder("spark.history.store.maxDiskUsage")
    .bytesConf(ByteUnit.BYTE)
    .createWithDefaultString("10g")

  val HISTORY_SERVER_UI_PORT = ConfigBuilder("spark.history.ui.port")
    .doc("Web UI port to bind Spark History Server")
    .intConf
    .createWithDefault(18080)
}
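Here spark.history.store.maxDiskUsage is declared with bytesConf(ByteUnit.BYTE), so its "10g" default string is parsed into a byte count when the entry is read. A minimal sketch of that parsing path, assuming direct use of JavaUtils.byteStringAs (the same call bytesConf uses internally); the demo object is hypothetical:

import org.apache.spark.network.util.{ByteUnit, JavaUtils}

// Hypothetical check of what the "10g" default above resolves to.
object HistoryConfParsingDemo {
  def main(args: Array[String]): Unit = {
    // bytesConf(ByteUnit.BYTE) parses "10g" into bytes (binary: 10 * 1024^3).
    val maxDiskUsage = JavaUtils.byteStringAs("10g", ByteUnit.BYTE)
    println(maxDiskUsage)                                    // 10737418240

    // Converting back to GiB for display recovers the configured value.
    println(ByteUnit.BYTE.convertTo(maxDiskUsage, ByteUnit.GiB)) // 10
  }
}

The same pattern applies to MAX_LOG_AGE_S: timeConf(TimeUnit.SECONDS) parses the "7d" default into seconds via JavaUtils.timeStringAs.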