java.util.Calendar Scala Examples
The following examples show how to use java.util.Calendar from Scala.
Each example is an excerpt from an open-source project; the source file, project, and license are noted above the code.
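Most of the excerpts below revolve around a handful of Calendar idioms: Calendar.getInstance, field access via get, and in-place arithmetic via add. As a warm-up, here is a minimal, self-contained sketch of those idioms; it is not taken from any of the projects below, and the object name is made up.

import java.util.{Calendar, TimeZone}

object CalendarBasics extends App {
  // Obtain a calendar pinned to an explicit time zone rather than the JVM default
  val cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"))

  // Field access; note that MONTH is zero-based (January == 0)
  val year  = cal.get(Calendar.YEAR)
  val month = cal.get(Calendar.MONTH) + 1
  val day   = cal.get(Calendar.DAY_OF_MONTH)

  // Calendar instances are mutable: add() shifts a field in place and rolls larger fields over
  cal.add(Calendar.DAY_OF_MONTH, 7)

  println(f"today (UTC): $year-$month%02d-$day%02d, one week later: ${cal.getTime}")
}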
Example 1
Source File: CalendarFormats.scala From sjson-new with Apache License 2.0
package sjsonnew

import java.util.{ Calendar, GregorianCalendar }
import java.time._
import java.time.format.DateTimeFormatter

trait CalendarFormats { self: IsoFormats =>
  private val utc: ZoneId = ZoneId.of("UTC")

  implicit val localDateTimeStringIso: IsoString[LocalDateTime] = {
    IsoString.iso[LocalDateTime](
      (ld: LocalDateTime) => {
        val datetimefmt = DateTimeFormatter.ISO_LOCAL_DATE_TIME
        ld.format(datetimefmt)
      },
      (s: String) => {
        val datetimefmt = DateTimeFormatter.ISO_LOCAL_DATE_TIME
        LocalDateTime.parse(s, datetimefmt)
      })
  }
}
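The trait above imports Calendar and GregorianCalendar, but the excerpt only shows the LocalDateTime instance. A Calendar-based IsoString would presumably follow the same pattern; here is a sketch that assumes the IsoString.iso factory shown above (the ISO_OFFSET_DATE_TIME choice is an assumption for illustration, not sjson-new's actual format):

  implicit val gregorianCalendarStringIso: IsoString[GregorianCalendar] =
    IsoString.iso[GregorianCalendar](
      (c: GregorianCalendar) => c.toZonedDateTime.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME),
      (s: String) => GregorianCalendar.from(ZonedDateTime.parse(s, DateTimeFormatter.ISO_OFFSET_DATE_TIME)))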
Example 2
Source File: JacksonMessageWriter.scala From drizzle-spark with Apache License 2.0
package org.apache.spark.status.api.v1 import java.io.OutputStream import java.lang.annotation.Annotation import java.lang.reflect.Type import java.nio.charset.StandardCharsets import java.text.SimpleDateFormat import java.util.{Calendar, SimpleTimeZone} import javax.ws.rs.Produces import javax.ws.rs.core.{MediaType, MultivaluedMap} import javax.ws.rs.ext.{MessageBodyWriter, Provider} import com.fasterxml.jackson.annotation.JsonInclude import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature} @Provider @Produces(Array(MediaType.APPLICATION_JSON)) private[v1] class JacksonMessageWriter extends MessageBodyWriter[Object]{ val mapper = new ObjectMapper() { override def writeValueAsString(t: Any): String = { super.writeValueAsString(t) } } mapper.registerModule(com.fasterxml.jackson.module.scala.DefaultScalaModule) mapper.enable(SerializationFeature.INDENT_OUTPUT) mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL) mapper.setDateFormat(JacksonMessageWriter.makeISODateFormat) override def isWriteable( aClass: Class[_], `type`: Type, annotations: Array[Annotation], mediaType: MediaType): Boolean = { true } override def writeTo( t: Object, aClass: Class[_], `type`: Type, annotations: Array[Annotation], mediaType: MediaType, multivaluedMap: MultivaluedMap[String, AnyRef], outputStream: OutputStream): Unit = { t match { case ErrorWrapper(err) => outputStream.write(err.getBytes(StandardCharsets.UTF_8)) case _ => mapper.writeValue(outputStream, t) } } override def getSize( t: Object, aClass: Class[_], `type`: Type, annotations: Array[Annotation], mediaType: MediaType): Long = { -1L } } private[spark] object JacksonMessageWriter { def makeISODateFormat: SimpleDateFormat = { val iso8601 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'GMT'") val cal = Calendar.getInstance(new SimpleTimeZone(0, "GMT")) iso8601.setCalendar(cal) iso8601 } }
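In this example Calendar only appears in makeISODateFormat, where Calendar.getInstance(new SimpleTimeZone(0, "GMT")) pins the SimpleDateFormat to GMT regardless of the machine's default time zone. A standalone sketch of that idiom (object name and printed value are illustrative):

import java.text.SimpleDateFormat
import java.util.{Calendar, Date, SimpleTimeZone}

object IsoGmtFormat extends App {
  val iso8601 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'GMT'")
  // Formatting is driven by this calendar's time zone, not the JVM default zone
  iso8601.setCalendar(Calendar.getInstance(new SimpleTimeZone(0, "GMT")))
  println(iso8601.format(new Date())) // e.g. 2017-03-01T12:34:56.789GMT
}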
Example 3
Source File: JacksonMessageWriter.scala From spark1.52 with Apache License 2.0
package org.apache.spark.status.api.v1 import java.io.OutputStream import java.lang.annotation.Annotation import java.lang.reflect.Type import java.text.SimpleDateFormat import java.util.{Calendar, SimpleTimeZone} import javax.ws.rs.Produces import javax.ws.rs.core.{MediaType, MultivaluedMap} import javax.ws.rs.ext.{MessageBodyWriter, Provider} import com.fasterxml.jackson.annotation.JsonInclude import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature} @Provider @Produces(Array(MediaType.APPLICATION_JSON)) private[v1] class JacksonMessageWriter extends MessageBodyWriter[Object]{ val mapper = new ObjectMapper() { override def writeValueAsString(t: Any): String = { super.writeValueAsString(t) } } mapper.registerModule(com.fasterxml.jackson.module.scala.DefaultScalaModule) mapper.enable(SerializationFeature.INDENT_OUTPUT) mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL) mapper.setDateFormat(JacksonMessageWriter.makeISODateFormat) override def isWriteable( aClass: Class[_], `type`: Type, annotations: Array[Annotation], mediaType: MediaType): Boolean = { true } override def writeTo( t: Object, aClass: Class[_], `type`: Type, annotations: Array[Annotation], mediaType: MediaType, multivaluedMap: MultivaluedMap[String, AnyRef], outputStream: OutputStream): Unit = { t match { case ErrorWrapper(err) => outputStream.write(err.getBytes("utf-8")) case _ => mapper.writeValue(outputStream, t) } } override def getSize( t: Object, aClass: Class[_], `type`: Type, annotations: Array[Annotation], mediaType: MediaType): Long = { -1L } } private[spark] object JacksonMessageWriter { def makeISODateFormat: SimpleDateFormat = { val iso8601 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'GMT'") val cal = Calendar.getInstance(new SimpleTimeZone(0, "GMT")) iso8601.setCalendar(cal) iso8601 } }
Example 4
Source File: JVMCrossDate.scala From perfolation with MIT License
package perfolation

import java.util.{Calendar, TimeZone}

class JVMCrossDate(val calendar: java.util.Calendar) extends AnyVal with CrossDate {
  override def milliseconds: Long = calendar.getTimeInMillis
  override def hour24: Int = calendar.get(Calendar.HOUR_OF_DAY)
  override def minuteOfHour: Int = calendar.get(Calendar.MINUTE)
  override def secondOfMinute: Int = calendar.get(Calendar.SECOND)
  override def milliOfSecond: Int = calendar.get(Calendar.MILLISECOND)
  override def isAM: Boolean = calendar.get(Calendar.AM_PM) == 0
  override def timeZoneOffsetMillis: Int = calendar.get(Calendar.ZONE_OFFSET) + calendar.get(Calendar.DST_OFFSET)
  override def year: Int = calendar.get(Calendar.YEAR)
  override def month: Int = calendar.get(Calendar.MONTH)
  override def dayOfWeek: Int = calendar.get(Calendar.DAY_OF_WEEK)
  override def dayOfMonth: Int = calendar.get(Calendar.DAY_OF_MONTH)
  override def dayOfYear: Int = calendar.get(Calendar.DAY_OF_YEAR) - 1
  override def timeZone: String = calendar.getTimeZone.getDisplayName(calendar.get(Calendar.DST_OFFSET) != 0, TimeZone.SHORT)
}
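A hypothetical, REPL-style usage sketch for the wrapper above — the field names come from the snippet itself, while the construction and printing are assumptions (perfolation normally builds these instances internally):

  import java.util.Calendar

  val d = new JVMCrossDate(Calendar.getInstance())
  // Calendar.MONTH is zero-based, so add 1 for a human-readable month
  println(s"${d.year}-${d.month + 1}-${d.dayOfMonth} ${d.hour24}:${d.minuteOfHour} (${d.timeZone})")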
Example 5
Source File: SchemaData.scala From pulsar-spark with Apache License 2.0
package org.apache.spark.sql.pulsar import java.sql.Timestamp import java.util import java.util.Calendar import scala.beans.BeanProperty import scala.collection.JavaConverters._ object SchemaData { val booleanSeq = Seq(true, false, true, true, false) val bytesSeq = 1.to(5).map(_.toString.getBytes) val cal = Calendar.getInstance() cal.clear() val dateSeq = (1 to 5).map { i => cal.set(2019, 0, i) cal.getTime } cal.clear() val timestampSeq = (1 to 5).map { i => cal.set(2019, 0, i, 20, 35, 40) new Timestamp(cal.getTimeInMillis) } val stringSeq = 1.to(5).map(_.toString) val int8Seq = 1.to(5).map(_.toByte) val doubleSeq = 1.to(5).map(_.toDouble) val floatSeq = 1.to(5).map(_.toFloat) val int32Seq = 1.to(5) val int64Seq = 1.to(5).map(_.toLong) val int16Seq = 1.to(5).map(_.toShort) case class Foo(@BeanProperty i: Int, @BeanProperty f: Float, @BeanProperty bar: Bar) case class Bar(@BeanProperty b: Boolean, @BeanProperty s: String) case class F1(@BeanProperty baz: Baz) case class Baz( @BeanProperty f: Float, @BeanProperty d: Double, @BeanProperty mp: util.Map[String, Bar], @BeanProperty arr: Array[Bar]) val fooSeq: Seq[Foo] = Foo(1, 1.0.toFloat, Bar(true, "a")) :: Foo(2, 2.0.toFloat, Bar(false, "b")) :: Foo(3, 0, null) :: Nil val f1Seq: Seq[F1] = F1( Baz( Float.NaN, Double.NaN, Map("1" -> Bar(true, "1"), "2" -> Bar(false, "2")).asJava, Array(Bar(true, "1"), Bar(true, "2")))) :: F1( Baz( Float.NegativeInfinity, Double.NegativeInfinity, Map("" -> Bar(true, "1")).asJava, null)) :: F1(Baz(Float.PositiveInfinity, Double.PositiveInfinity, null, null)) :: F1(Baz(1.0.toFloat, 2.0, null, null)) :: Nil val f1Results = f1Seq.map(f1 => (f1.baz.f, f1.baz.d, if (f1.baz.mp == null) null else f1.baz.mp.asScala, f1.baz.arr)) }
Example 6
Source File: GimelProperties.scala From gimel with Apache License 2.0
package com.paypal.gimel.common.conf import java.io.{File, FileInputStream} import java.util.{Calendar, Properties} import scala.collection.JavaConverters._ import scala.collection.mutable import com.paypal.gimel.logger.Logger class GimelProperties(userProps: Map[String, String] = Map[String, String]()) { // Get Logger val logger = Logger() logger.info(s"Initiating --> ${this.getClass.getName}") // Get Properties val props: mutable.Map[String, String] = getProps val runTagUUID: String = java.util.UUID.randomUUID.toString val startTimeMS: String = Calendar.getInstance().getTimeInMillis.toString val tagToAdd: String = s"_$startTimeMS" private def getConf(key: String): String = { userProps.getOrElse(key, props(key)) } // Kafka Properties val kafkaBroker: String = getConf(GimelConstants.KAFKA_BROKER_LIST) val kafkaConsumerCheckPointRoot: String = getConf(GimelConstants.KAFKA_CONSUMER_CHECKPOINT_PATH) val kafkaAvroSchemaKey: String = getConf(GimelConstants.KAFKA_CDH_SCHEMA) val confluentSchemaURL: String = getConf(GimelConstants.CONFLUENT_SCHEMA_URL) val hbaseNameSpace: String = getConf(GimelConstants.HBASE_NAMESPACE) val zkHostAndPort: String = getConf(GimelConstants.ZOOKEEPER_LIST) val zkPrefix: String = getConf(GimelConstants.ZOOKEEPER_STATE) val esHost: String = getConf(GimelConstants.ES_NODE) val esPort: String = getConf(GimelConstants.ES_PORT) // Kerberos val keytab: String = getConf(GimelConstants.KEY_TAB) val principal: String = getConf(GimelConstants.KEY_TAB_PRINCIPAL) val cluster: String = getConf(GimelConstants.CLUSTER) val dataSetDeploymentClusters: String = getConf(GimelConstants.DEPLOYMENT_CLUSTERS) val defaultESCluster: String = props(GimelConstants.ES_POLLING_STORAGES) def hiveURL(cluster: String): String = { userProps.getOrElse(s"gimel.hive.$cluster.url", props(s"gimel.hive.$cluster.url")) } def esURL(escluster: String): String = { val alternateConfig = props(s"gimel.es.${defaultESCluster}.url") userProps.getOrElse(GimelConstants.ES_URL_WITH_PORT, alternateConfig) } def apply(params: Map[String, String]): GimelProperties = new GimelProperties(params) }
Example 7
Source File: TSQR.scala From SparkAndMPIFactorizations with MIT License
package edu.berkeley.cs.amplab.mlmatrix

import java.util.concurrent.ThreadLocalRandom
import scala.collection.mutable.ArrayBuffer
import breeze.linalg._
import edu.berkeley.cs.amplab.mlmatrix.util.QRUtils
import edu.berkeley.cs.amplab.mlmatrix.util.Utils
import org.apache.spark.rdd.RDD
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.Accumulator
import org.apache.spark.SparkContext._
import java.util.Calendar
import java.text.SimpleDateFormat

class modifiedTSQR extends Serializable {

  def report(message: String, verbose: Boolean = true) = {
    val now = Calendar.getInstance().getTime()
    val formatter = new SimpleDateFormat("H:m:s")
    if (verbose) {
      println("STATUS REPORT (" + formatter.format(now) + "): " + message)
    }
  }

  private def reduceQR(
      acc: Accumulator[Double],
      a: Tuple2[DenseVector[Double], DenseMatrix[Double]],
      b: Tuple2[DenseVector[Double], DenseMatrix[Double]]): Tuple2[DenseVector[Double], DenseMatrix[Double]] = {
    val begin = System.nanoTime
    val outmat = QRUtils.qrR(DenseMatrix.vertcat(a._2, b._2), false)
    val outcolnorms = a._1 + b._1
    acc += ((System.nanoTime - begin) / 1e6)
    (outcolnorms, outmat)
  }
}
Example 8
Source File: JacksonMessageWriter.scala From BigDatalog with Apache License 2.0
package org.apache.spark.status.api.v1 import java.io.OutputStream import java.lang.annotation.Annotation import java.lang.reflect.Type import java.text.SimpleDateFormat import java.util.{Calendar, SimpleTimeZone} import javax.ws.rs.Produces import javax.ws.rs.core.{MediaType, MultivaluedMap} import javax.ws.rs.ext.{MessageBodyWriter, Provider} import com.fasterxml.jackson.annotation.JsonInclude import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature} @Provider @Produces(Array(MediaType.APPLICATION_JSON)) private[v1] class JacksonMessageWriter extends MessageBodyWriter[Object]{ val mapper = new ObjectMapper() { override def writeValueAsString(t: Any): String = { super.writeValueAsString(t) } } mapper.registerModule(com.fasterxml.jackson.module.scala.DefaultScalaModule) mapper.enable(SerializationFeature.INDENT_OUTPUT) mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL) mapper.setDateFormat(JacksonMessageWriter.makeISODateFormat) override def isWriteable( aClass: Class[_], `type`: Type, annotations: Array[Annotation], mediaType: MediaType): Boolean = { true } override def writeTo( t: Object, aClass: Class[_], `type`: Type, annotations: Array[Annotation], mediaType: MediaType, multivaluedMap: MultivaluedMap[String, AnyRef], outputStream: OutputStream): Unit = { t match { case ErrorWrapper(err) => outputStream.write(err.getBytes("utf-8")) case _ => mapper.writeValue(outputStream, t) } } override def getSize( t: Object, aClass: Class[_], `type`: Type, annotations: Array[Annotation], mediaType: MediaType): Long = { -1L } } private[spark] object JacksonMessageWriter { def makeISODateFormat: SimpleDateFormat = { val iso8601 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'GMT'") val cal = Calendar.getInstance(new SimpleTimeZone(0, "GMT")) iso8601.setCalendar(cal) iso8601 } }
Example 9
Source File: Driver.scala From OnlineLDA_Spark with Apache License 2.0
package com.github.yuhao.yang import java.util.Calendar import org.apache.log4j.{Level, Logger} import org.apache.spark.{SparkContext, SparkConf} import scala.collection.mutable.ArrayBuffer object Driver extends Serializable{ def main(args: Array[String]) { Logger.getLogger("org").setLevel(Level.ERROR) Logger.getLogger("akka").setLevel(Level.ERROR) val inputDir = args(0) val filePaths = extractPaths(inputDir + "texts", true) val stopWordsPath = inputDir + "stop.txt" val vocabPath = inputDir + "wordsEn.txt" println("begin: " + Calendar.getInstance().getTime) println("path size: " + filePaths.size) assert(filePaths.size > 0) val conf = new SparkConf().setAppName("online LDA Spark") val sc = new SparkContext(conf) val vocab = Docs2Vec.extractVocab(sc, Seq(vocabPath), stopWordsPath) val vocabArray = vocab.map(_.swap) val K = args(1).toInt // val lda = OnlineLDA_Spark.runBatchMode(sc, filePaths, vocab, K, 50) val lda = OnlineLDA_Spark.runOnlineMode(sc, filePaths, vocab, K, args(2).toInt) println("_lambda:") for(row <- 0 until lda._lambda.rows){ val v = lda._lambda(row, ::).t val topk = lda._lambda(row, ::).t.argtopk(10) val pairs = topk.map(k => (vocabArray(k), v(k))) val sorted = pairs.sortBy(_._2).reverse println(sorted.map(x => (x._1)).mkString(","), sorted.map(x => ("%2.2f".format(x._2))).mkString(",")) } println("end: " + Calendar.getInstance().getTime()) } def extractPaths(path: String, recursive: Boolean = true): Array[String] ={ val docsets = ArrayBuffer[String]() val fileList = new java.io.File(path).listFiles() if(fileList == null) return docsets.toArray for(f <- fileList){ if(f.isDirectory){ if(recursive) docsets ++= extractPaths(f.getAbsolutePath, true) } else{ docsets += f.getAbsolutePath } } docsets.toArray } }
Example 10
Source File: StreamingDemo.scala From flink-demos with Apache License 2.0
package com.dataartisans.flink.example.eventpattern import java.text.SimpleDateFormat import java.util import java.util.{Calendar, Properties, UUID} import com.dataartisans.flink.example.eventpattern.kafka.EventDeSerializer import org.apache.flink.api.common.functions.{RuntimeContext, RichFlatMapFunction} import org.apache.flink.api.common.state.{ValueState, ValueStateDescriptor} import org.apache.flink.configuration.Configuration import org.apache.flink.streaming.api.scala._ import org.apache.flink.streaming.connectors.elasticsearch.{IndexRequestBuilder, ElasticsearchSink} import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer08 import org.apache.flink.util.Collector import org.elasticsearch.action.index.IndexRequest import org.elasticsearch.client.Requests class StateMachineMapper extends RichFlatMapFunction[Event, Alert] { private[this] var currentState: ValueState[State] = _ override def open(config: Configuration): Unit = { currentState = getRuntimeContext.getState( new ValueStateDescriptor("state", classOf[State], InitialState)) } override def flatMap(t: Event, out: Collector[Alert]): Unit = { val state = currentState.value() val nextState = state.transition(t.event) nextState match { case InvalidTransition => out.collect(Alert(t.sourceAddress, state, t.event)) case x if x.terminal => currentState.clear() case x => currentState.update(nextState) } } }
Example 11
Source File: JacksonMessageWriter.scala From Spark-2.3.1 with Apache License 2.0
package org.apache.spark.status.api.v1 import java.io.OutputStream import java.lang.annotation.Annotation import java.lang.reflect.Type import java.nio.charset.StandardCharsets import java.text.SimpleDateFormat import java.util.{Calendar, Locale, SimpleTimeZone} import javax.ws.rs.Produces import javax.ws.rs.core.{MediaType, MultivaluedMap} import javax.ws.rs.ext.{MessageBodyWriter, Provider} import com.fasterxml.jackson.annotation.JsonInclude import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature} @Provider @Produces(Array(MediaType.APPLICATION_JSON)) private[v1] class JacksonMessageWriter extends MessageBodyWriter[Object]{ val mapper = new ObjectMapper() { override def writeValueAsString(t: Any): String = { super.writeValueAsString(t) } } mapper.registerModule(com.fasterxml.jackson.module.scala.DefaultScalaModule) mapper.enable(SerializationFeature.INDENT_OUTPUT) mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL) mapper.setDateFormat(JacksonMessageWriter.makeISODateFormat) override def isWriteable( aClass: Class[_], `type`: Type, annotations: Array[Annotation], mediaType: MediaType): Boolean = { true } override def writeTo( t: Object, aClass: Class[_], `type`: Type, annotations: Array[Annotation], mediaType: MediaType, multivaluedMap: MultivaluedMap[String, AnyRef], outputStream: OutputStream): Unit = { t match { case ErrorWrapper(err) => outputStream.write(err.getBytes(StandardCharsets.UTF_8)) case _ => mapper.writeValue(outputStream, t) } } override def getSize( t: Object, aClass: Class[_], `type`: Type, annotations: Array[Annotation], mediaType: MediaType): Long = { -1L } } private[spark] object JacksonMessageWriter { def makeISODateFormat: SimpleDateFormat = { val iso8601 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'GMT'", Locale.US) val cal = Calendar.getInstance(new SimpleTimeZone(0, "GMT")) iso8601.setCalendar(cal) iso8601 } }
Example 12
Source File: BusinessLogger.scala From languagedetector with MIT License
package biz.meetmatch.logging import java.text.SimpleDateFormat import java.util.{Calendar, Date} import org.slf4j.LoggerFactory object BusinessLogger { def getDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss") } class BusinessLogger(module: String) { private val logger = LoggerFactory.getLogger("businessLogger") def calcStarted(options: String, sparkAppId: String): Unit = { log(s"CALC\tSTART\t$options\t$sparkAppId") } def calcStopped(result: String): Unit = { log(s"CALC\tSTOP\t$result") } def jobStarted(jobId: Int, jobDescription: String, stageCount: Int, executionId: Option[String]): Unit = { log(s"JOB\t$jobId\tSTART\t${jobDescription.replace("\n", " ").replace("\t", " ")}\t$stageCount\t${executionId.getOrElse("")}") } def jobStopped(jobId: Int, result: String): Unit = { log(s"JOB\t$jobId\tSTOP\t$result") } def transactionStarted(category: String, id: String, stageId: Int = -1, partitionId: Int = -1, taskId: Long = -1, message: String = ""): Unit = { log(s"TRANSACTION\t$category\t$id\tSTART\t$stageId\t$partitionId\t$taskId\t${message.replace("\n", " ").replace("\t", " ")}") } def transactionStopped(category: String, id: String): Unit = { log(s"TRANSACTION\t$category\t$id\tSTOP") } def dataParquetRead(tableName: String, count: Long = -1): Unit = { log(s"DATA\tPARQUET\tREAD\t${tableName.replace("\n", " ").replace("\t", " ")}\t$count") } def dataParquetWritten(tableName: String, countBefore: Long, countAfter: Long): Unit = { log(s"DATA\tPARQUET\tWRITE\t${tableName.replace("\n", " ").replace("\t", " ")}\t$countBefore\t$countAfter") } def dataJdbcRead(tableName: String, count: Long = -1): Unit = { log(s"DATA\tJDBC\tREAD\t${tableName.replace("\n", " ").replace("\t", " ")}\t$count") } def dataJdbcWritten(tableName: String, countBefore: Long = -1, countAfter: Long = -1, countUpdated: Long = -1): Unit = { log(s"DATA\tJDBC\tWRITE\t${tableName.replace("\n", " ").replace("\t", " ")}\t$countBefore\t$countAfter\t$countUpdated") } def info(subject: String, message: String): Unit = { log(s"MESSAGE\tINFO\t${subject.replace("\n", " ").replace("\t", " ")}\t${message.replace("\n", " ").replace("\t", " ")}") } def warn(subject: String, message: String): Unit = { log(s"MESSAGE\tWARN\t${subject.replace("\n", " ").replace("\t", " ")}\t${message.replace("\n", " ").replace("\t", " ")}") } def error(subject: String, message: String): Unit = { log(s"MESSAGE\tERROR\t${subject.replace("\n", " ").replace("\t", " ")}\t${message.replace("\n", " ").replace("\t", " ")}") } private def log(line: String) = { logger.info(s"${BusinessLogger.getDateFormat.format(Calendar.getInstance.getTime)}\t$module\t$line") } } case class LogLineWorkflow(message: String, startDate: String, stopDate: Date, duration: String, state: String, options: Array[Array[String]], sparkAppId: String, calcs: Array[LogLineCalc], warnings: Int, errors: Int) case class LogLineCalc(module: String, startDate: String, stopDate: Date, duration: String, state: String, options: Array[Array[String]], sparkAppId: String, jobs: Array[LogLineJob], transactionCategories: Array[LogLineTransactionCategory], dataReads: Array[LogLineDataRead], dataWrites: Array[LogLineDataWrite], messages: Array[LogLineMessage]) case class LogLineJob(id: Int, startDate: String, duration: String, state: String, description: String, stageCount: Int, executionId: Int = -1) case class LogLineTransactionCategory(category: String, transactions: Array[LogLineTransaction], numberOfTransactions: Int, averageFinishedTransactionDuration: String) case class LogLineTransaction(category: String, 
id: String, stageId: Int, partitionId: Int, taskId: Long, message: String, startDate: String, duration: String, state: String) case class LogLineDataRead(storage: String, tableName: String, count: Int, date: String) case class LogLineDataWrite(storage: String, tableName: String, countBefore: Int, countAfter: Int, countUpdated: Int, date: String) case class LogLineMessage(category: String, subject: String, message: String, date: String)
Example 13
Source File: OilPriceFunc.scala From Mastering-Spark-for-Data-Science with MIT License
package io.gzet.geomesa import java.text.SimpleDateFormat import java.util.Calendar import org.apache.spark.sql.SparkSession import org.apache.spark.sql.expressions.Window import org.apache.spark.sql.functions.{udf, window, last, col, lag} object OilPriceFunc { // use this if the window function misbehaves due to timezone e.g. BST // ./spark-shell --driver-java-options "-Duser.timezone=UTC" // ./spark-submit --conf 'spark.driver.extraJavaOptions=-Duser.timezone=UTC' // define a function to reformat the date field def convert(date:String) : String = { val df1 = new SimpleDateFormat("dd/MM/yyyy") val dt = df1.parse(date) val df2 = new SimpleDateFormat("yyyy-MM-dd") df2.format(dt) } // create and save oil price changes def createOilPriceDF(inputfile: String, outputfile: String, spark: SparkSession) = { val oilPriceDF = spark. read. option("header", "true"). option("inferSchema", "true"). csv(inputfile) val convertDateUDF = udf { (Date: String) => convert(Date) } val oilPriceDatedDF = oilPriceDF.withColumn("DATE", convertDateUDF(oilPriceDF("DATE"))) // offset to start at beginning of week val windowDF = oilPriceDatedDF.groupBy(window(oilPriceDatedDF.col("DATE"), "7 days", "7 days", "4 days")) val windowLastDF = windowDF.agg(last("PRICE") as "last(PRICE)").sort("window") // windowLastDF.show(20, false) val sortedWindow = Window.orderBy("window.start") val lagLastCol = lag(col("last(PRICE)"), 1).over(sortedWindow) val lagLastColDF = windowLastDF.withColumn("lastPrev(PRICE)", lagLastCol) // lagLastColDF.show(20, false) val simplePriceChangeFunc = udf { (last: Double, prevLast: Double) => var change = ((last - prevLast) compare 0).signum if (change == -1) change = 0 change.toDouble } val findDateTwoDaysAgoUDF = udf { (date: String) => val dateFormat = new SimpleDateFormat("yyyy-MM-dd") val cal = Calendar.getInstance cal.setTime(dateFormat.parse(date)) cal.add(Calendar.DATE, -3) dateFormat.format(cal.getTime) } val oilPriceChangeDF = lagLastColDF.withColumn("label", simplePriceChangeFunc( lagLastColDF("last(PRICE)"), lagLastColDF("lastPrev(PRICE)") )).withColumn("commonFriday", findDateTwoDaysAgoUDF(lagLastColDF("window.end"))) // oilPriceChangeDF.show(20, false) oilPriceChangeDF.select("label", "commonFriday"). write. format("com.databricks.spark.csv"). option("header", "true"). //.option("codec", "org.apache.hadoop.io.compress.GzipCodec") save(outputfile) } }
Example 14
Source File: ApplicationKeystore.scala From OUTDATED_ledger-wallet-android with MIT License
package co.ledger.wallet.core.security import java.io.File import java.math.BigInteger import java.security.KeyStore.{PasswordProtection, PrivateKeyEntry} import java.security.{KeyStore, SecureRandom} import java.util.Calendar import javax.security.auth.x500.X500Principal import android.content.Context import co.ledger.wallet.core.crypto.Crypto import org.spongycastle.x509.X509V3CertificateGenerator import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future class ApplicationKeystore(context: Context, val keystoreName: String) extends Keystore(context) { def install(passwordProtection: PasswordProtection): Future[Keystore] = { assert(!file.exists(), "The keystore already exists") val keyStore = KeyStore.getInstance(KeyStore.getDefaultType) keyStore.load(null) val output = context.openFileOutput(keystoreName, Context.MODE_PRIVATE) keyStore.store(output, passwordProtection.getPassword) output.close() load(passwordProtection) } def setPassword(password: String): Unit = { require(password != null, "Password cannot be null") _password = password.toCharArray store() } override protected def loadJavaKeyStore(passwordProtection: PasswordProtection): Future[JavaKeyStore] = { Future { assert(file.exists(), "The keystore is not installed yet") val keystore = KeyStore.getInstance(KeyStore.getDefaultType) val input = context.openFileInput(keystoreName) keystore.load(input, passwordProtection.getPassword) input.close() _password = passwordProtection.getPassword keystore } } override def generateKey(alias: String): JavaKeyPair = { Crypto.ensureSpongyIsInserted() val kpg = java.security.KeyPairGenerator.getInstance("RSA") val calendar = Calendar.getInstance() val now = calendar.getTime calendar.add(Calendar.YEAR, 100) val end = calendar.getTime kpg.initialize(1024, new SecureRandom()) val keypair = kpg.generateKeyPair() val certGen = new X509V3CertificateGenerator() val dnName = new X500Principal("CN=Ledger") certGen.setSerialNumber(BigInteger.valueOf(System.currentTimeMillis())) certGen.setSubjectDN(dnName) certGen.setIssuerDN(dnName) certGen.setNotBefore(now) certGen.setNotAfter(end) certGen.setPublicKey(keypair.getPublic) certGen.setSignatureAlgorithm("SHA256WithRSAEncryption") val certificate = certGen.generate(keypair.getPrivate, "SC") javaKeystore.get.setEntry(alias, new PrivateKeyEntry(keypair.getPrivate, Array(certificate)), null) store() Crypto.ensureSpongyIsRemoved() keypair } def delete(): Unit = { file.delete() } def isInstalled = file.exists() private[this] def store(): Unit = { file.delete() val output = context.openFileOutput(keystoreName, Context.MODE_PRIVATE) javaKeystore.get.store(output, _password) output.close() } @inline private[this] def file = new File(context.getFilesDir, keystoreName) private var _password: Array[Char] = null }
Example 15
Source File: AndroidKeystore.scala From OUTDATED_ledger-wallet-android with MIT License
package co.ledger.wallet.core.security

import java.math.BigInteger
import java.security.KeyStore
import java.security.KeyStore.PasswordProtection
import java.util.Calendar
import javax.security.auth.x500.X500Principal

import android.content.Context
import android.security.KeyPairGeneratorSpec
import co.ledger.wallet.core.crypto.Crypto

import scala.concurrent.{Promise, Future}

class AndroidKeystore(context: Context) extends Keystore(context) {

  override protected def loadJavaKeyStore(passwordProtection: PasswordProtection): Future[JavaKeyStore] = {
    Crypto.ensureSpongyIsRemoved()
    val keystore = KeyStore.getInstance("AndroidKeyStore")
    keystore.load(null)
    Future.successful(keystore)
  }

  override def generateKey(alias: String): JavaKeyPair = {
    Crypto.ensureSpongyIsRemoved()
    val kpg = java.security.KeyPairGenerator.getInstance("RSA", "AndroidKeyStore")
    val calendar = Calendar.getInstance()
    val now = calendar.getTime
    calendar.add(Calendar.YEAR, 100)
    val end = calendar.getTime
    kpg.initialize(
      new KeyPairGeneratorSpec.Builder(context.getApplicationContext)
        .setAlias(alias)
        .setStartDate(now)
        .setEndDate(end)
        .setSerialNumber(BigInteger.valueOf(1))
        .setSubject(new X500Principal("CN=Ledger"))
        .build()
    )
    kpg.generateKeyPair()
  }
}
Example 16
Source File: DQCommandLineOptions.scala From DataQuality with GNU Lesser General Public License v3.0
package it.agilelab.bigdata.DataQuality.utils

import java.util.{Calendar, Date}

import org.joda.time
import org.joda.time.DateTime
import scopt.OptionParser

case class DQCommandLineOptions(applicationConf: String,
                                configFilePath: String,
                                refDate: Date = new Date(),
                                repartition: Boolean = false,
                                local: Boolean = false)

object DQCommandLineOptions {

  def parser(): OptionParser[DQCommandLineOptions] =
    new OptionParser[DQCommandLineOptions]("dataquality") {

      opt[String]('a', "application-conf") required () action { (x, c) =>
        c.copy(applicationConf = x)
      } text "Path to application configuration file"

      opt[String]('c', "configFilePath") required () action { (x, c) =>
        c.copy(configFilePath = x)
      } text "Path to run configuration file"

      opt[Calendar]('d', "reference-date") required () action { (x, c) =>
        c.copy(refDate = x.getTime)
      } text "Indicates the date at which the DataQuality checks will be performed (format YYYY-MM-DD)"

      opt[Unit]('r', "repartition") optional () action { (_, c) =>
        c.copy(repartition = true)
      } text "Specifies whether the application is repartitioning the input data"

      opt[Unit]('l', "local") optional () action { (_, c) =>
        c.copy(local = true)
      } text "Specifies whether the application is operating in local mode"
    }
}
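A usage sketch for the parser above (argument values are illustrative). Assuming scopt 3.x, its built-in Calendar reader parses the -d value as yyyy-MM-dd into a Calendar, and the action converts it to java.util.Date via getTime, as shown in the snippet:

  val args = Array("-a", "application.conf", "-c", "run.conf", "-d", "2019-01-31", "-l")
  DQCommandLineOptions.parser().parse(args, DQCommandLineOptions("", "")) match {
    case Some(opts) => println(s"running data-quality checks for ${opts.refDate}, local=${opts.local}")
    case None       => sys.exit(1) // scopt has already printed the usage message
  }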
Example 17
Source File: Footer.scala From scala-debugger with Apache License 2.0
package org.scaladebugger.docs.layouts.partials.common

import java.net.URL
import java.util.Calendar

import org.scaladebugger.docs.styles.PageStyle

import scalatags.Text.all._

object Footer {
  def apply(authorName: String, authorUrl: URL, startYear: Int): Modifier = {
    tag("footer")(PageStyle.footerCls, PageStyle.sectionDark)(
      div(PageStyle.footerContent)(
        span(
          raw("Site contents "),
          i(`class` := "fa fa-copyright", attr("aria-hidden") := "true"),
          raw(" "),
          a(href := authorUrl.toString)(authorName),
          raw(s", $startYear-${Calendar.getInstance().get(Calendar.YEAR)}")
        )
      )
    )
  }
}
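A small usage sketch for the builder above (the author name, URL, and start year are made up); the end of the copyright range comes from Calendar.getInstance().get(Calendar.YEAR):

  import java.net.URL

  val footer = Footer("Jane Doe", new URL("https://example.com"), 2015)
  // Rendered inside a page layout, this produces roughly:
  //   Site contents © Jane Doe, 2015-<current year>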
Example 18
Source File: OutputInterceptorFactory.scala From seahorse with Apache License 2.0
package ai.deepsense.sessionmanager.service.sessionspawner.sparklauncher.outputintercepting import java.io.File import java.text.SimpleDateFormat import java.util.logging._ import java.util.{Calendar, UUID} import com.google.inject.Inject import com.google.inject.name.Named import org.apache.spark.launcher.SparkLauncher import ai.deepsense.commons.models.ClusterDetails case class OutputInterceptorHandle private [outputintercepting] ( private val logger: Logger, private val childProcLoggerName: String, private val loggerFileHandler: FileHandler ) { def attachTo(sparkLauncher: SparkLauncher): Unit = { sparkLauncher.setConf( "spark.launcher.childProcLoggerName", childProcLoggerName ) } def writeOutput(text: String): Unit = { logger.info(text) } def close(): Unit = { loggerFileHandler.close() } } class OutputInterceptorFactory @Inject()( @Named("session-executor.spark-applications-logs-dir") val executorsLogDirectory: String ) { def prepareInterceptorWritingToFiles(clusterDetails: ClusterDetails): OutputInterceptorHandle = { new File(executorsLogDirectory).mkdirs() val childProcLoggerName = s"WE-app-${UUID.randomUUID()}" val logger = Logger.getLogger(childProcLoggerName) val fileName = { val time = Calendar.getInstance().getTime() // Colons are not allowed in Windows filenames val format = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss") val formattedTime = format.format(time) val illegalFileNameCharactersRegExp = "[^a-zA-Z0-9.-]" s"$formattedTime-${clusterDetails.name.replaceAll(illegalFileNameCharactersRegExp, "_")}.log" } val fileHandler = new FileHandler(s"$executorsLogDirectory/$fileName") fileHandler.setFormatter(new SimpleFormaterWithoutOutputRedirectorNoise) logger.addHandler(fileHandler) sys.addShutdownHook { fileHandler.close() } OutputInterceptorHandle(logger, childProcLoggerName, fileHandler) } class SimpleFormaterWithoutOutputRedirectorNoise extends Formatter { val simpleFormatter = new SimpleFormatter override def format(logRecord: LogRecord): String = { val formatted = simpleFormatter.format(logRecord) val redirectorNoise = "org.apache.spark.launcher.OutputRedirector redirect\nINFO: " val beginningOfRedirectorNoise = formatted.indexOf(redirectorNoise) val endOfRedirectorNoise = if (beginningOfRedirectorNoise > 0) { beginningOfRedirectorNoise + redirectorNoise.length } else { 0 } formatted.substring(endOfRedirectorNoise) } } }
Example 19
Source File: Encoders.scala From quill with Apache License 2.0
package io.getquill.context.jdbc import java.sql.{ Date, Timestamp, Types } import java.time.{ LocalDate, LocalDateTime } import java.util.{ Calendar, TimeZone } import java.{ sql, util } trait Encoders { this: JdbcContextBase[_, _] => type Encoder[T] = JdbcEncoder[T] protected val dateTimeZone = TimeZone.getDefault case class JdbcEncoder[T](sqlType: Int, encoder: BaseEncoder[T]) extends BaseEncoder[T] { override def apply(index: Index, value: T, row: PrepareRow) = encoder(index + 1, value, row) } def encoder[T](sqlType: Int, f: (Index, T, PrepareRow) => Unit): Encoder[T] = JdbcEncoder(sqlType, (index: Index, value: T, row: PrepareRow) => { f(index, value, row) row }) def encoder[T](sqlType: Int, f: PrepareRow => (Index, T) => Unit): Encoder[T] = encoder(sqlType, (index: Index, value: T, row: PrepareRow) => f(row)(index, value)) implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: Encoder[O]): Encoder[I] = JdbcEncoder(e.sqlType, mappedBaseEncoder(mapped, e.encoder)) private[this] val nullEncoder: Encoder[Int] = encoder(Types.INTEGER, _.setNull) implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]] = JdbcEncoder( d.sqlType, (index, value, row) => value match { case Some(v) => d.encoder(index, v, row) case None => nullEncoder.encoder(index, d.sqlType, row) } ) implicit val stringEncoder: Encoder[String] = encoder(Types.VARCHAR, _.setString) implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder(Types.NUMERIC, (index, value, row) => row.setBigDecimal(index, value.bigDecimal)) implicit val byteEncoder: Encoder[Byte] = encoder(Types.TINYINT, _.setByte) implicit val shortEncoder: Encoder[Short] = encoder(Types.SMALLINT, _.setShort) implicit val intEncoder: Encoder[Int] = encoder(Types.INTEGER, _.setInt) implicit val longEncoder: Encoder[Long] = encoder(Types.BIGINT, _.setLong) implicit val floatEncoder: Encoder[Float] = encoder(Types.FLOAT, _.setFloat) implicit val doubleEncoder: Encoder[Double] = encoder(Types.DOUBLE, _.setDouble) implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder(Types.VARBINARY, _.setBytes) implicit val dateEncoder: Encoder[util.Date] = encoder(Types.TIMESTAMP, (index, value, row) => row.setTimestamp(index, new sql.Timestamp(value.getTime), Calendar.getInstance(dateTimeZone))) implicit val localDateEncoder: Encoder[LocalDate] = encoder(Types.DATE, (index, value, row) => row.setDate(index, Date.valueOf(value), Calendar.getInstance(dateTimeZone))) implicit val localDateTimeEncoder: Encoder[LocalDateTime] = encoder(Types.TIMESTAMP, (index, value, row) => row.setTimestamp(index, Timestamp.valueOf(value), Calendar.getInstance(dateTimeZone))) }
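In these JDBC encoders the Calendar argument passed to setTimestamp and setDate tells the driver which time zone to use when encoding the value, instead of silently falling back to the JVM default. A plain-JDBC sketch of the same idea (the table and column names are made up):

import java.sql.{Connection, Timestamp}
import java.util.{Calendar, TimeZone}

def insertEvent(conn: Connection, occurredAt: Timestamp): Unit = {
  val utc = Calendar.getInstance(TimeZone.getTimeZone("UTC"))
  val ps  = conn.prepareStatement("INSERT INTO events (occurred_at) VALUES (?)")
  try {
    // The calendar fixes the zone used to encode the timestamp on the wire
    ps.setTimestamp(1, occurredAt, utc)
    ps.executeUpdate()
  } finally ps.close()
}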
Example 20
Source File: Decoders.scala From quill with Apache License 2.0
package io.getquill.context.jdbc import java.time.{ LocalDate, LocalDateTime } import java.util import java.util.Calendar import scala.math.BigDecimal.javaBigDecimal2bigDecimal trait Decoders { this: JdbcContextBase[_, _] => type Decoder[T] = JdbcDecoder[T] case class JdbcDecoder[T](decoder: BaseDecoder[T]) extends BaseDecoder[T] { def apply(index: Index, row: ResultRow) = decoder(index + 1, row) } def decoder[T](d: BaseDecoder[T]): Decoder[T] = JdbcDecoder(d) def decoder[T](f: ResultRow => Index => T): Decoder[T] = decoder((index, row) => f(row)(index)) implicit def mappedDecoder[I, O](implicit mapped: MappedEncoding[I, O], d: Decoder[I]): Decoder[O] = JdbcDecoder(mappedBaseDecoder(mapped, d.decoder)) implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] = JdbcDecoder( (index, row) => { try { // According to the JDBC spec, we first need to read the object before `row.wasNull` works row.getObject(index) if (row.wasNull()) { None } else { Some(d.decoder(index, row)) } } catch { case _: NullPointerException if row.wasNull() => None } } ) implicit val stringDecoder: Decoder[String] = decoder(_.getString) implicit val bigDecimalDecoder: Decoder[BigDecimal] = decoder((index, row) => row.getBigDecimal(index)) implicit val byteDecoder: Decoder[Byte] = decoder(_.getByte) implicit val shortDecoder: Decoder[Short] = decoder(_.getShort) implicit val intDecoder: Decoder[Int] = decoder(_.getInt) implicit val longDecoder: Decoder[Long] = decoder(_.getLong) implicit val floatDecoder: Decoder[Float] = decoder(_.getFloat) implicit val doubleDecoder: Decoder[Double] = decoder(_.getDouble) implicit val byteArrayDecoder: Decoder[Array[Byte]] = decoder(_.getBytes) implicit val dateDecoder: Decoder[util.Date] = decoder((index, row) => new util.Date(row.getTimestamp(index, Calendar.getInstance(dateTimeZone)).getTime)) implicit val localDateDecoder: Decoder[LocalDate] = decoder((index, row) => row.getDate(index, Calendar.getInstance(dateTimeZone)).toLocalDate) implicit val localDateTimeDecoder: Decoder[LocalDateTime] = decoder((index, row) => row.getTimestamp(index, Calendar.getInstance(dateTimeZone)).toLocalDateTime) }
Example 21
Source File: JacksonMessageWriter.scala From iolap with Apache License 2.0
package org.apache.spark.status.api.v1 import java.io.OutputStream import java.lang.annotation.Annotation import java.lang.reflect.Type import java.text.SimpleDateFormat import java.util.{Calendar, SimpleTimeZone} import javax.ws.rs.Produces import javax.ws.rs.core.{MediaType, MultivaluedMap} import javax.ws.rs.ext.{MessageBodyWriter, Provider} import com.fasterxml.jackson.annotation.JsonInclude import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature} @Provider @Produces(Array(MediaType.APPLICATION_JSON)) private[v1] class JacksonMessageWriter extends MessageBodyWriter[Object]{ val mapper = new ObjectMapper() { override def writeValueAsString(t: Any): String = { super.writeValueAsString(t) } } mapper.registerModule(com.fasterxml.jackson.module.scala.DefaultScalaModule) mapper.enable(SerializationFeature.INDENT_OUTPUT) mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL) mapper.setDateFormat(JacksonMessageWriter.makeISODateFormat) override def isWriteable( aClass: Class[_], `type`: Type, annotations: Array[Annotation], mediaType: MediaType): Boolean = { true } override def writeTo( t: Object, aClass: Class[_], `type`: Type, annotations: Array[Annotation], mediaType: MediaType, multivaluedMap: MultivaluedMap[String, AnyRef], outputStream: OutputStream): Unit = { t match { case ErrorWrapper(err) => outputStream.write(err.getBytes("utf-8")) case _ => mapper.writeValue(outputStream, t) } } override def getSize( t: Object, aClass: Class[_], `type`: Type, annotations: Array[Annotation], mediaType: MediaType): Long = { -1L } } private[spark] object JacksonMessageWriter { def makeISODateFormat: SimpleDateFormat = { val iso8601 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'GMT'") val cal = Calendar.getInstance(new SimpleTimeZone(0, "GMT")) iso8601.setCalendar(cal) iso8601 } }
Example 22
Source File: DateHelper.scala From spark-vector with Apache License 2.0
package com.actian.spark_vector.test.util

import java.sql.{ Date => sqlDate, Timestamp }
import java.util.{ Calendar, Date, GregorianCalendar, TimeZone }

object DateHelper {
  def dateFor(year: Int, month: Int, day: Int, hours: Int = 0, minutes: Int = 0, seconds: Int = 0,
              millis: Int = 0, tz: TimeZone = TimeZone.getDefault): Date = {
    import java.util.Calendar._
    val cal = new GregorianCalendar()
    cal.setTimeZone(tz)
    if (year > 0) cal.set(YEAR, year)
    if (month >= 0) cal.set(MONTH, month)
    if (day > 0) cal.set(DAY_OF_MONTH, day)
    cal.set(HOUR_OF_DAY, hours)
    cal.set(MINUTE, minutes)
    cal.set(SECOND, seconds)
    cal.set(MILLISECOND, millis)
    cal.getTime
  }

  def timestampFor(year: Int, month: Int, day: Int, hours: Int = 0, minutes: Int = 0, seconds: Int = 0,
                   millis: Int = 0, tz: TimeZone = TimeZone.getDefault): Timestamp =
    new Timestamp(dateFor(year, month, day, hours, minutes, seconds, millis, tz).getTime)

  def timeFor(hours: Int, minutes: Int, seconds: Int): Timestamp = {
    import java.util.Calendar
    val year = Calendar.getInstance().get(Calendar.YEAR)
    val month = Calendar.getInstance().get(Calendar.MONTH)
    val day = Calendar.getInstance().get(Calendar.DATE)
    timestampFor(year, month, day, hours, minutes, seconds)
  }

  def ansiDateFor(year: Int, month: Int, day: Int): sqlDate = new sqlDate(dateFor(year, month, day).getTime())
}
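A usage sketch for the helpers above (the values are illustrative). The month argument is zero-based, matching Calendar.MONTH, so the Calendar month constants read more clearly than bare integers:

  import java.util.{Calendar, TimeZone}

  val newYearsEve   = DateHelper.dateFor(2019, Calendar.DECEMBER, 31, tz = TimeZone.getTimeZone("UTC"))
  val almostNewYear = DateHelper.timestampFor(2019, Calendar.DECEMBER, 31, 23, 59, 59)
  val ansiDate      = DateHelper.ansiDateFor(2019, Calendar.DECEMBER, 31)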
Example 23
Source File: huemul_DataLakeSetting.scala From huemul-bigdatagovernance with Apache License 2.0
package com.huemulsolutions.bigdata.datalake import java.util.Calendar import scala.collection.mutable.ArrayBuffer import com.huemulsolutions.bigdata.common.huemul_BigDataGovernance import com.huemulsolutions.bigdata.common.huemul_KeyValuePath import huemulType_FileType._ class huemul_DataLakeSetting(huemulBigDataGov: huemul_BigDataGovernance) extends Serializable { var rowDelimiterForPDF: String = "\\n" private var use_year: Integer = null def getuse_year: Integer = {return use_year} private var use_month: Integer = null def getuse_month: Integer = {return use_month} private var use_day: Integer = null def getuse_day: Integer = {return use_day} private var use_hour: Integer = null def getuse_hour: Integer = {return use_hour} private var use_minute: Integer = null def getuse_minute: Integer = {return use_minute} private var use_second: Integer = null def getuse_second: Integer = {return use_second} private var use_params: String = "" def getuse_params: String = {return use_params} def SetParamsInUse(ano: Integer, mes: Integer, dia: Integer, hora: Integer, min: Integer, seg: Integer, AdditionalParams: String){ use_year = ano use_month = mes use_day = dia use_hour = hora use_minute = min use_second = seg use_params = AdditionalParams } def GetFullNameWithPath() : String = { return GetPath(GlobalPath) + LocalPath + FileName } def GetDataBase(Division: ArrayBuffer[huemul_KeyValuePath]): String = { return huemulBigDataGov.GlobalSettings.GetDataBase(huemulBigDataGov, Division) } def GetDataBase(Division: ArrayBuffer[huemul_KeyValuePath], ManualEnvironment: String): String = { return huemulBigDataGov.GlobalSettings.GetDataBase(huemulBigDataGov, Division, ManualEnvironment) } def GetPath(Division: ArrayBuffer[huemul_KeyValuePath]): String = { return huemulBigDataGov.GlobalSettings.GetPath(huemulBigDataGov, Division) } def GetPath(Division: ArrayBuffer[huemul_KeyValuePath], ManualEnvironment: String): String = { return huemulBigDataGov.GlobalSettings.GetPath(huemulBigDataGov, Division, ManualEnvironment) } }
Example 24
Source File: JwtDirectivesSpec.scala From spray-jwt with MIT License
package com.github.kikuomax.spray.jwt import com.nimbusds.jose.{ JWSAlgorithm, JWSObject } import com.nimbusds.jwt.JWTClaimsSet import java.util.Calendar import org.specs2.mutable.Specification import scala.concurrent.{ ExecutionContext, Future } import scala.concurrent.duration.{ Duration, SECONDS } import spray.routing.HttpService import spray.routing.authentication.UserPass import spray.testkit.Specs2RouteTest import JwtClaimBuilder.claimExpiration class JwtDirectivesSpec extends Specification with Specs2RouteTest with HttpService with JwtDirectives { // uses the one provided by spray.testkit override def actorRefFactory = system // implicit execution context implicit val executionContext = system.dispatcher // creates signer and verifier val signature = JwtSignature(JWSAlgorithm.HS256, "thisHasGotToBeAtleast32BitsLong.") // claims set builder that builds a claims set valid for one second val oneSecondBuilder: String => Option[JWTClaimsSet] = claimExpiration(Duration(1, SECONDS)) // route that builds a claims set valid for one second val oneSecondRoute = get { def authenticator = jwtAuthenticator(up => Future { up.map(_.user) })( oneSecondBuilder, signature.jwtSigner, executionContext) def authentication = authenticator(Some(UserPass("user", "pass"))).map { u => Right(u.get) } authenticate(authentication) { jws => complete(jws.serialize()) } } "One second claims set builder" should { "build claims set valid for one second" in { val now = Calendar.getInstance() val expireBefore = now.clone().asInstanceOf[Calendar] expireBefore.add(Calendar.SECOND, 2) // in 2 seconds Get() ~> oneSecondRoute ~> check { val jws = JWSObject.parse(responseAs[String]) val claims = JWTClaimsSet.parse(jws.getPayload().toJSONObject()) // expects claims set // should expire after now but in 2 seconds from `now` claims.getExpirationTime().after(now.getTime()) must beTrue claims.getExpirationTime().before(expireBefore.getTime()) must beTrue } } } }
Example 25
Source File: KafkaOffsetRevertTest.scala From flink-rookie with Apache License 2.0
package com.venn.kafka import java.text.SimpleDateFormat import java.util.{Calendar, Date} import com.venn.common.Common import com.venn.util.MathUtil import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord} import scala.util.parsing.json.JSONObject val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS") def main(args: Array[String]): Unit = { val producer = new KafkaProducer[String, String](Common.getProp(true)) var i = 0; while (true) { // val map = Map("id"-> i, "createTime"-> sdf.format(System.currentTimeMillis())) val map = Map("id" -> i, "createTime" -> sdf.format(System.currentTimeMillis()), "amt" -> (MathUtil.random.nextInt(10) + "." + MathUtil.random.nextInt(10))) val jsonObject: JSONObject = new JSONObject(map) println(jsonObject.toString()) // topic current_day val msg = new ProducerRecord[String, String]("kafka_offset", jsonObject.toString()) producer.send(msg) producer.flush() Thread.sleep(1000) i = i + 1 // System.exit(-1) } } }
Example 26
Source File: SlotPartitionMaker.scala From flink-rookie with Apache License 2.0
package com.venn.demo import java.text.SimpleDateFormat import java.util.{Calendar, Date} import com.venn.common.Common import com.venn.util.MathUtil import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord} import scala.util.parsing.json.JSONObject def getCreateTime(): String = { // minute = minute + 1 calendar.add(Calendar.MILLISECOND, 10) sdf.format(calendar.getTime) } val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS") def main(args: Array[String]): Unit = { val prop = Common.getProp prop.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer") prop.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer") val producer = new KafkaProducer[String, String](Common.getProp) calendar.setTime(new Date()) println(sdf.format(calendar.getTime)) var i = 0; while (true) { val map = Map("id" -> i, "createTime" -> getCreateTime(), "amt" -> (MathUtil.random.nextInt(10) + "." + MathUtil.random.nextInt(10))) val jsonObject: JSONObject = new JSONObject(map) println(jsonObject.toString()) // topic current_day val msg = new ProducerRecord[String, String]("slot_partition", jsonObject.toString()) producer.send(msg) producer.flush() if (MathUtil.random.nextBoolean()) { Thread.sleep(1500) } else { Thread.sleep(500) } i = i + 1 // System.exit(-1) } } }
Example 27
Source File: FileSinkMaker.scala From flink-rookie with Apache License 2.0
package com.venn.stream.api.filesink import java.text.SimpleDateFormat import java.util.Calendar import com.venn.common.Common import com.venn.stream.api.dayWindow.CurrentDayMaker.{calendar, getCreateTime, sdf} import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord} import scala.util.parsing.json.JSONObject object FileSinkMaker { val topic = "async" def main(args: Array[String]): Unit = { while (true) { left("roll_file_sink") Thread.sleep(100) } } val sdf = new SimpleDateFormat("yyyyMMddHHmmss") var idLeft = 0 def left(topic: String) = { val producer = new KafkaProducer[String, String](Common.getProp) idLeft = idLeft + 1 val map = Map("id" -> idLeft, "name" -> ("venn" + System.currentTimeMillis()), "date" -> getCreateTime) val jsonObject: JSONObject = new JSONObject(map) println("left : " + jsonObject.toString()) val msg = new ProducerRecord[String, String](topic, jsonObject.toString()) // producer.send(msg) // producer.flush() } var minute : Int = 1 val calendar: Calendar = Calendar.getInstance() def getCreateTime(): String = { // minute = minute + 1 calendar.add(Calendar.MINUTE, 10) sdf.format(calendar.getTime) } }
Example 28
Source File: WindowDemoMaker.scala From flink-rookie with Apache License 2.0
package com.venn.stream.api.trigger import java.text.SimpleDateFormat import java.util.{Calendar, Date} import com.venn.common.Common import com.venn.util.MathUtil import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord} import scala.util.parsing.json.JSONObject def getCreateTime(): String = { // minute = minute + 1 calendar.add(Calendar.MILLISECOND, 10) sdf.format(calendar.getTime) } val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS") def main(args: Array[String]): Unit = { val producer = new KafkaProducer[String, String](Common.getProp) calendar.setTime(new Date()) println(sdf.format(calendar.getTime)) var i = 0; while (true) { val map = Map("id" -> i, "createTime" -> getCreateTime(), "amt" -> (MathUtil.random.nextInt(10) + "." + MathUtil.random.nextInt(10))) val jsonObject: JSONObject = new JSONObject(map) println(jsonObject.toString()) // topic current_day val msg = new ProducerRecord[String, String]("current_day", jsonObject.toString()) producer.send(msg) producer.flush() if (MathUtil.random.nextBoolean()) { Thread.sleep(1500) } else { Thread.sleep(500) } i = i + 1 // System.exit(-1) } } }
Example 29
Source File: LateDataMaker.scala From flink-rookie with Apache License 2.0
package com.venn.stream.api.sideoutput.lateDataProcess import java.text.SimpleDateFormat import java.util.{Calendar, Date} import com.venn.common.Common import com.venn.util.MathUtil import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord} import scala.util.parsing.json.JSONObject def getCreateTime(): String = { // minute = minute + 1 calendar.add(Calendar.SECOND, 10) sdf.format(calendar.getTime) } val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS") def main(args: Array[String]): Unit = { val producer = new KafkaProducer[String, String](Common.getProp) calendar.setTime(new Date()) println(sdf.format(calendar.getTime)) var i =74540; while (true) { // val map = Map("id"-> i, "createTime"-> sdf.format(System.currentTimeMillis())) val map = Map("id"-> i, "createTime"-> getCreateTime(), "amt"-> (MathUtil.random.nextInt(10) +"." + MathUtil.random.nextInt(10))) val jsonObject: JSONObject = new JSONObject(map) println(jsonObject.toString()) // topic current_day val msg = new ProducerRecord[String, String]("late_data", jsonObject.toString()) producer.send(msg) producer.flush() Thread.sleep(200) i = i + 1 // System.exit(-1) } } }
Example 30
Source File: CurrentDayMaker.scala From flink-rookie with Apache License 2.0
package com.venn.stream.api.dayWindow import java.text.SimpleDateFormat import java.util.{Calendar, Date} import com.venn.common.Common import com.venn.util.MathUtil import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord} import scala.util.parsing.json.JSONObject def getCreateTime(): String = { // minute = minute + 1 calendar.add(Calendar.MINUTE, 10) sdf.format(calendar.getTime) } val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS") def main(args: Array[String]): Unit = { val producer = new KafkaProducer[String, String](Common.getProp) calendar.setTime(new Date()) println(sdf.format(calendar.getTime)) var i =0; while (true) { // val map = Map("id"-> i, "createTime"-> sdf.format(System.currentTimeMillis())) val map = Map("id"-> i, "createTime"-> getCreateTime(), "amt"-> (MathUtil.random.nextInt(10) +"." + MathUtil.random.nextInt(10))) val jsonObject: JSONObject = new JSONObject(map) println(jsonObject.toString()) // topic current_day val msg = new ProducerRecord[String, String]("current_day", jsonObject.toString()) producer.send(msg) producer.flush() Thread.sleep(1000) i = i + 1 // System.exit(-1) } } }
Example 31
Source File: DateUtilsTest.scala From bigdata-examples with Apache License 2.0
package com.timeyang.common.util

import java.text.SimpleDateFormat
import java.util.Calendar

import org.junit.Test

@Test
class DateUtilsTest {

  @Test
  def test(): Unit = {
    // scalastyle:off
    println(DateUtils.current())
    val formatter = new SimpleDateFormat("yyyyMMddHH")
    println(formatter.format(System.currentTimeMillis()))
    // scalastyle:on
  }

  @Test
  def testTime(): Unit = {
    val calendar = Calendar.getInstance()
    val hour = calendar.get(Calendar.HOUR_OF_DAY)
    // scalastyle:off println
    println(hour)
    // scalastyle:on println
    calendar.add(Calendar.MILLISECOND, 60 * 60 * 1000)
    val hourOfNext = calendar.get(Calendar.HOUR_OF_DAY)
    print(hourOfNext)
  }
}
Example 32
Source File: JacksonMessageWriter.scala From multi-tenancy-spark with Apache License 2.0
package org.apache.spark.status.api.v1 import java.io.OutputStream import java.lang.annotation.Annotation import java.lang.reflect.Type import java.nio.charset.StandardCharsets import java.text.SimpleDateFormat import java.util.{Calendar, Locale, SimpleTimeZone} import javax.ws.rs.Produces import javax.ws.rs.core.{MediaType, MultivaluedMap} import javax.ws.rs.ext.{MessageBodyWriter, Provider} import com.fasterxml.jackson.annotation.JsonInclude import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature} @Provider @Produces(Array(MediaType.APPLICATION_JSON)) private[v1] class JacksonMessageWriter extends MessageBodyWriter[Object]{ val mapper = new ObjectMapper() { override def writeValueAsString(t: Any): String = { super.writeValueAsString(t) } } mapper.registerModule(com.fasterxml.jackson.module.scala.DefaultScalaModule) mapper.enable(SerializationFeature.INDENT_OUTPUT) mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL) mapper.setDateFormat(JacksonMessageWriter.makeISODateFormat) override def isWriteable( aClass: Class[_], `type`: Type, annotations: Array[Annotation], mediaType: MediaType): Boolean = { true } override def writeTo( t: Object, aClass: Class[_], `type`: Type, annotations: Array[Annotation], mediaType: MediaType, multivaluedMap: MultivaluedMap[String, AnyRef], outputStream: OutputStream): Unit = { t match { case ErrorWrapper(err) => outputStream.write(err.getBytes(StandardCharsets.UTF_8)) case _ => mapper.writeValue(outputStream, t) } } override def getSize( t: Object, aClass: Class[_], `type`: Type, annotations: Array[Annotation], mediaType: MediaType): Long = { -1L } } private[spark] object JacksonMessageWriter { def makeISODateFormat: SimpleDateFormat = { val iso8601 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'GMT'", Locale.US) val cal = Calendar.getInstance(new SimpleTimeZone(0, "GMT")) iso8601.setCalendar(cal) iso8601 } }
Example 33
Source File: DataGens.scala From spark-vector with Apache License 2.0
package com.actian.spark_vector import java.math.BigDecimal import java.{ sql => jsql } import java.util.Calendar import scala.collection.Seq import scala.util.Try import org.apache.spark.sql.Row import org.apache.spark.sql.types._ import org.scalacheck.Gen import com.actian.spark_vector.colbuffer.util.MillisecondsInDay import java.math.RoundingMode object DataGens { import com.actian.spark_vector.DataTypeGens._ import org.scalacheck.Arbitrary._ import org.scalacheck.Gen._ import scala.collection.JavaConverters._ val DefaultMaxRows = 500 val booleanGen: Gen[Boolean] = arbitrary[Boolean] val byteGen: Gen[Byte] = arbitrary[Byte] val shortGen: Gen[Short] = arbitrary[Short] val intGen: Gen[Int] = arbitrary[Int] val longGen: Gen[Long] = arbitrary[Long] // FIXME allow arbitrary doubles (and filter externally for vector tests) val floatGen: Gen[Float] = arbitrary[Float].map(f => if (f.abs > 1e-38) f else 0.0f) // FIXME allow arbitrary doubles (and filter externally for vector tests) val doubleGen: Gen[Double] = for { neg <- arbitrary[Boolean] digits <- listOfN(12, choose(0, 9)) } yield s"${if (neg) "-" else ""}1.${digits.mkString("")}".toDouble val decimalGen: Gen[BigDecimal] = arbitrary[scala.BigDecimal].retryUntil(bd => bd.scale <= 12 && bd.scale >= 0 && bd.precision <= 26 && Try { new BigDecimal(bd.toString) }.isSuccess).map(bd => new BigDecimal(bd.toString)) private val dateValueGen: Gen[Long] = choose(-3600L * 1000 * 24 * 100000L, 3600L * 1000 * 24 * 100000L) // @note normalize getTime so that we don't have diffs more than 1 day in between our {JDBC,Spark}results val dateGen: Gen[jsql.Date] = dateValueGen.map(d => new jsql.Date(d / MillisecondsInDay * MillisecondsInDay)) val timestampGen: Gen[jsql.Timestamp] = for (ms <- dateValueGen) yield new jsql.Timestamp(ms) // FIXME allow empty strings (and filter externally for vector tests) // @note we do not allow invalid UTF8 chars to be generated (from D800 to DFFF incl) val stringGen: Gen[String] = listOfN(choose(1, 512).sample.getOrElse(1), arbitrary[Char]).map(_.mkString).map( s => s.filter(c => Character.isDefined(c) && c != '\u0000' && (c < '\uD800' || c > '\uDFFF')) ) def valueGen(dataType: DataType): Gen[Any] = dataType match { case BooleanType => booleanGen case ByteType => byteGen case ShortType => shortGen case IntegerType => intGen case LongType => longGen case FloatType => floatGen case DoubleType => doubleGen case TimestampType => timestampGen case DateType => dateGen case StringType => stringGen case _: DecimalType => decimalGen case _ => throw new Exception("Invalid data type.") } def nullableValueGen(field: StructField): Gen[Any] = { val gen = valueGen(field.dataType) if (field.nullable) frequency(1 -> gen, 10 -> const(null)) else gen } def rowGen(schema: StructType): Gen[Row] = sequence(schema.fields.map(f => nullableValueGen(f))).map(l => Row.fromSeq(l.asScala)) // TODO Huh? Why ju.ArrayList?!? def dataGenFor(schema: StructType, maxRows: Int): Gen[Seq[Row]] = for { numRows <- choose(1, maxRows) rows <- listOfN(numRows, rowGen(schema)) } yield rows case class TypedData(dataType: StructType, data: Seq[Row]) val dataGen: Gen[TypedData] = for { schema <- schemaGen data <- dataGenFor(schema, DefaultMaxRows) } yield TypedData(schema, data) val allDataGen: Gen[TypedData] = for { schema <- allTypesSchemaGen data <- dataGenFor(schema, DefaultMaxRows) } yield TypedData(schema, data) }
Example 34
Source File: TimeColumnBuffer.scala From spark-vector with Apache License 2.0 | 5 votes |
package com.actian.spark_vector.colbuffer.time import java.nio.ByteBuffer import java.sql.Timestamp import java.util.{ Calendar, TimeZone } import org.apache.spark.sql.catalyst.util.DateTimeUtils import com.actian.spark_vector.ComposePartial import com.actian.spark_vector.colbuffer._ import com.actian.spark_vector.colbuffer.util._ import com.actian.spark_vector.vector.VectorDataType private case class TimeColumnBufferParams(cbParams: ColumnBufferBuildParams, converter: TimeConversion.TimeConverter, adjustToUTC: Boolean = false) private[colbuffer] abstract class TimeColumnBuffer(p: TimeColumnBufferParams, valueWidth: Int) extends ColumnBuffer[Timestamp, Long](p.cbParams.name, p.cbParams.maxValueCount, valueWidth, valueWidth, p.cbParams.nullable) { private val ts = new Timestamp(System.currentTimeMillis()) private val cal = Calendar.getInstance override def put(source: Timestamp, buffer: ByteBuffer): Unit = { if (p.adjustToUTC) { TimeConversion.convertLocalTimestampToUTC(source, cal) } val convertedSource = p.converter.convert(TimeConversion.normalizeTime(source), p.cbParams.scale) putConverted(convertedSource, buffer) } protected def putConverted(converted: Long, buffer: ByteBuffer): Unit override def get(buffer: ByteBuffer): Long = { val deconvertedSource = p.converter.deconvert(getConverted(buffer), p.cbParams.scale) ts.setTime(TimeConversion.scaleNanos(deconvertedSource, MillisecondsScale)) ts.setNanos((deconvertedSource % PowersOfTen(NanosecondsScale)).toInt) if (p.adjustToUTC) { TimeConversion.convertUTCToLocalTimestamp(ts, cal) } DateTimeUtils.fromJavaTimestamp(ts) } protected def getConverted(buffer: ByteBuffer): Long } private class TimeIntColumnBuffer(p: TimeColumnBufferParams) extends TimeColumnBuffer(p, IntSize) { override protected def putConverted(converted: Long, buffer: ByteBuffer): Unit = buffer.putInt(converted.toInt) override protected def getConverted(buffer: ByteBuffer): Long = buffer.getInt() } private class TimeLongColumnBuffer(p: TimeColumnBufferParams) extends TimeColumnBuffer(p, LongSize) { override protected def putConverted(converted: Long, buffer: ByteBuffer): Unit = buffer.putLong(converted) override protected def getConverted(buffer: ByteBuffer): Long = buffer.getLong() } private class TimeNZLZConverter extends TimeConversion.TimeConverter { override def convert(unscaledNanos: Long, scale: Int): Long = TimeConversion.scaleNanos(unscaledNanos, scale) override def deconvert(scaledNanos: Long, scale: Int): Long = TimeConversion.unscaleNanos(scaledNanos, scale) } private class TimeTZConverter extends TimeConversion.TimeConverter { override def convert(unscaledNanos: Long, scale: Int): Long = (TimeConversion.scaleNanos(unscaledNanos, scale) << TimeMaskSize) override def deconvert(scaledNanos: Long, scale: Int): Long = TimeConversion.unscaleNanos(scaledNanos >> TimeMaskSize, scale) } private[colbuffer] object TimeColumnBuffer extends ColumnBufferBuilder { private final val (nzlzIntScaleBounds, nzlzLongScaleBounds) = ((0, 4), (5, 9)) private final val (tzIntScaleBounds, tzLongScaleBounds) = ((0, 1), (2, 9)) private val calIsNotUTC = Calendar.getInstance.getTimeZone != TimeZone.getTimeZone("UTC") private val buildNZPartial: PartialFunction[ColumnBufferBuildParams, TimeColumnBufferParams] = ofDataType(VectorDataType.TimeType) andThen { TimeColumnBufferParams(_, new TimeNZLZConverter(), calIsNotUTC) } private val buildLZPartial: PartialFunction[ColumnBufferBuildParams, TimeColumnBufferParams] = ofDataType(VectorDataType.TimeLTZType) andThen { TimeColumnBufferParams(_, 
new TimeNZLZConverter()) } private val buildNZLZ: PartialFunction[ColumnBufferBuildParams, ColumnBuffer[_, _]] = (buildNZPartial orElse buildLZPartial) andThenPartial { case nzlz if isInBounds(nzlz.cbParams.scale, nzlzIntScaleBounds) => new TimeIntColumnBuffer(nzlz) case nzlz if isInBounds(nzlz.cbParams.scale, nzlzLongScaleBounds) => new TimeLongColumnBuffer(nzlz) } private val buildTZPartial: PartialFunction[ColumnBufferBuildParams, TimeColumnBufferParams] = ofDataType(VectorDataType.TimeTZType) andThen { TimeColumnBufferParams(_, new TimeTZConverter()) } private val buildTZ: PartialFunction[ColumnBufferBuildParams, ColumnBuffer[_, _]] = buildTZPartial andThenPartial { case tz if isInBounds(tz.cbParams.scale, tzIntScaleBounds) => new TimeIntColumnBuffer(tz) case tz if isInBounds(tz.cbParams.scale, tzLongScaleBounds) => new TimeLongColumnBuffer(tz) } override private[colbuffer] val build: PartialFunction[ColumnBufferBuildParams, ColumnBuffer[_, _]] = buildNZLZ orElse buildTZ }
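The builder above enables local-to-UTC adjustment only when the default calendar's time zone is not already UTC. A minimal sketch of that check and of shifting an instant by the zone offset (the shift shown here is a simplified stand-in for the connector's TimeConversion helpers):

import java.sql.Timestamp
import java.util.{Calendar, TimeZone}

object UtcAdjustDemo {
  def main(args: Array[String]): Unit = {
    val cal = Calendar.getInstance
    val needsAdjust = cal.getTimeZone != TimeZone.getTimeZone("UTC")
    val ts = new Timestamp(System.currentTimeMillis())
    // getOffset accounts for daylight saving at the given instant
    val offsetMillis = cal.getTimeZone.getOffset(ts.getTime)
    val utcMillis = if (needsAdjust) ts.getTime - offsetMillis else ts.getTime
    println(s"needsAdjust=$needsAdjust utcMillis=$utcMillis")
  }
}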
Example 35
Source File: DateColumnBuffer.scala From spark-vector with Apache License 2.0 | 5 votes |
package com.actian.spark_vector.colbuffer.singles import java.nio.ByteBuffer import java.sql.Date import java.util.Calendar import com.actian.spark_vector.colbuffer._ import com.actian.spark_vector.vector.VectorDataType private class DateColumnBuffer(p: ColumnBufferBuildParams) extends ColumnBuffer[Date, Int](p.name, p.maxValueCount, DateSize, DateSize, p.nullable) { private final val DaysBeforeEpoch = 719528 private final val JulianBoundary = 578101 private final val CenturyDays = 36524 override def put(source: Date, buffer: ByteBuffer): Unit = { val cal = Calendar.getInstance cal.set(source.getYear, source.getMonth, source.getDate()) val dayOfYear = cal.get(Calendar.DAY_OF_YEAR) val year = source.getYear + 1900 // Need to convert to proleptic gregorian calendar date var days = (year * 365) + ((year - 1) / 4) - (year / 100) + (year / 400) + dayOfYear // Need to adjust for error in Jan-Feb of certain century years if (year % 100 == 0 && year % 400 != 0 && dayOfYear < 61) days += 1 buffer.putInt(days) } override def get(buffer: ByteBuffer): Int = { val days = buffer.getInt() var offset = 0 // Need to convert from proleptic gregorian to julian if date before 1582/10/14 if (days < JulianBoundary) { val n = (days - 366) / CenturyDays offset = n - (n / 4 + 2) // Need to adjust for error in Jan-Feb of certain century years val cdays = days % CenturyDays val qdays = days % (CenturyDays * 4) if (qdays > 365 && cdays < 366 && cdays > (59 + n / 4)) { offset += 1 } } days - DaysBeforeEpoch + offset } } private[colbuffer] object DateColumnBuffer extends ColumnBufferBuilder { override private[colbuffer] val build: PartialFunction[ColumnBufferBuildParams, ColumnBuffer[_, _]] = ofDataType(VectorDataType.DateType) andThen { new DateColumnBuffer(_) } }
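The buffer above relies on Calendar.DAY_OF_YEAR when converting a java.sql.Date into a day count. A minimal sketch of just that Calendar lookup, leaving out the connector-specific proleptic-Gregorian correction (object name is illustrative):

import java.sql.Date
import java.util.Calendar

object DayOfYearDemo {
  def main(args: Array[String]): Unit = {
    val d = Date.valueOf("2020-03-01")
    val cal = Calendar.getInstance
    // getYear is years since 1900 and getMonth is zero-based, as in the original code
    cal.set(d.getYear + 1900, d.getMonth, d.getDate)
    println(cal.get(Calendar.DAY_OF_YEAR)) // 61 in a leap year
  }
}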
Example 36
Source File: InputSource.scala From flink-parameter-server with Apache License 2.0 | 5 votes |
package hu.sztaki.ilab.ps.matrix.factorization.utils import java.util.Calendar import hu.sztaki.ilab.ps.matrix.factorization.utils.InputTypes.{AnyOrWatermark, EventWithTimestamp} import org.apache.flink.streaming.api.functions.source.SourceFunction import org.apache.flink.streaming.api.watermark.Watermark import scala.io.{BufferedSource, Source} class InputSource[T <: EventWithTimestamp](dataFilePath: String, servingSpeed: Int, fromString: String => T, baseDataStartTime: Option[Long], simulationEndTime: Option[Long]) extends SourceFunction[T] { private val simEndTime: Long = simulationEndTime.getOrElse(0L) override def cancel(): Unit = ??? def createList(reader: BufferedSource): List[T] = { reader .getLines() .map(fromString) .toList } override def run(ctx: SourceFunction.SourceContext[T]): Unit = { val reader = Source.fromFile(dataFilePath) val events: List[T] = createList(reader) val dataStartTime: Long = baseDataStartTime match { case Some(dst) => dst case None => events.head.getEventTime } val sortedEvents = events .map(x => AnyOrWatermark(x.getEventTime, x)) .toArray val servingStartTime = Calendar.getInstance.getTimeInMillis sortedEvents .foreach( event => { val now = Calendar.getInstance().getTimeInMillis val servingTime = toServingTime(servingStartTime, dataStartTime, event.time) val waitTime = servingTime - now Thread.sleep(math.max(waitTime, 0)) event.value match { case v: T => ctx.collectWithTimestamp(v, event.time) case wm: Watermark => ctx.emitWatermark(wm) } }) } private def toServingTime(servingStartTime: Long, dataStartTime: Long, eventTime: Long) = { if(simEndTime != 0 && eventTime >= simEndTime){ val dataDiff = eventTime - simEndTime ((servingStartTime + (simEndTime / servingSpeed)) + dataDiff) - (dataStartTime / servingSpeed) } else{ val dataDiff = eventTime - dataStartTime servingStartTime + (dataDiff / servingSpeed) } } }
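The source above paces replay by comparing the wall clock, read through Calendar.getInstance.getTimeInMillis, against a computed serving time and sleeping for the difference. A minimal sketch of that loop with hypothetical event offsets:

import java.util.Calendar

object ReplayPacingDemo {
  def main(args: Array[String]): Unit = {
    val eventOffsetsMs = Seq(0L, 100L, 250L) // hypothetical event times relative to the start
    val servingStart = Calendar.getInstance.getTimeInMillis
    eventOffsetsMs.foreach { t =>
      val now = Calendar.getInstance.getTimeInMillis
      val waitTime = (servingStart + t) - now
      Thread.sleep(math.max(waitTime, 0)) // never sleep a negative amount
      println(s"emit event scheduled at +$t ms")
    }
  }
}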
Example 37
Source File: TimeSpec.scala From ez-framework with Apache License 2.0 | 5 votes |
package com.ecfront.ez.framework.core.misc import java.text.SimpleDateFormat import java.util.{Calendar, Date} import com.ecfront.ez.framework.core.BasicSpec class TimeSpec extends BasicSpec { test("ZeroTimeOffset Test") { val dfd = new SimpleDateFormat("yyyyMMdd") def getZeroTimeOffset = { val currentTime = new Date() val currentDay = dfd.parse(dfd.format(currentTime)) val calendar = Calendar.getInstance() calendar.setTime(currentDay) calendar.add(Calendar.DATE, 1) calendar.getTime.getTime - currentTime.getTime } println(getZeroTimeOffset) Thread.sleep(10000) println(getZeroTimeOffset) val calendar = Calendar.getInstance() calendar.setTimeInMillis(new Date().getTime + getZeroTimeOffset) println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss:SSS").format(calendar.getTime)) } }
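The test above measures how long remains until the next midnight by parsing today's date back into a Calendar and adding one day. A compact sketch of the same calculation:

import java.text.SimpleDateFormat
import java.util.{Calendar, Date}

object ZeroTimeOffsetDemo {
  def main(args: Array[String]): Unit = {
    val dayFormat = new SimpleDateFormat("yyyyMMdd")
    val now = new Date()
    val startOfDay = dayFormat.parse(dayFormat.format(now)) // today at 00:00:00.000
    val cal = Calendar.getInstance()
    cal.setTime(startOfDay)
    cal.add(Calendar.DATE, 1) // next midnight
    println(cal.getTime.getTime - now.getTime) // milliseconds until midnight
  }
}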
Example 38
Source File: TimeHelper.scala From ez-framework with Apache License 2.0 | 5 votes |
package com.ecfront.ez.framework.core.helper import java.text.SimpleDateFormat import java.time.ZonedDateTime import java.util.{Calendar, Date, TimeZone} object TimeHelper { val msf = new SimpleDateFormat("yyyyMMddHHmmssSSS") val sf = new SimpleDateFormat("yyyyMMddHHmmss") val mf = new SimpleDateFormat("yyyyMMddHHmm") val hf = new SimpleDateFormat("yyyyMMddHH") val df = new SimpleDateFormat("yyyyMMdd") val Mf = new SimpleDateFormat("yyyyMM") val yf = new SimpleDateFormat("yyyy") val yyyy_MM_dd_HH_mm_ss_SSS = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss:SSS") val yyyy_MM_dd_HH_mm_ss = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss") val yyyy_MM_dd = new SimpleDateFormat("yyyy-MM-dd") def dateOffset(offsetValue: Int, offsetUnit: Int, currentTime: Long): Long = { val format = currentTime.toString.length match { case 8 => df case 10 => hf case 12 => mf case 14 => sf case 17 => msf } val calendar = Calendar.getInstance() calendar.setTime(format.parse(currentTime + "")) calendar.add(offsetUnit, offsetValue) format.format(calendar.getTime).toLong } def dateOffset(offsetValue: Int, offsetUnit: Int, currentDate: Date): Date = { val calendar = Calendar.getInstance() calendar.setTime(currentDate) calendar.add(offsetUnit, offsetValue) calendar.getTime } def utc2Local(utcTime: String, localTimePatten: String = "yyyy-MM-dd'T'HH:mm:ss"): String = { val utcDate = Date.from(ZonedDateTime.parse(utcTime).toInstant) val localF = new SimpleDateFormat(localTimePatten) localF.setTimeZone(TimeZone.getDefault) localF.format(utcDate.getTime) } }
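dateOffset above shifts a date by an arbitrary amount, where the unit is one of the Calendar field constants passed straight to Calendar.add. A minimal usage sketch:

import java.util.{Calendar, Date}

object DateOffsetDemo {
  def dateOffset(offsetValue: Int, offsetUnit: Int, current: Date): Date = {
    val cal = Calendar.getInstance()
    cal.setTime(current)
    cal.add(offsetUnit, offsetValue) // negative values move backwards in time
    cal.getTime
  }

  def main(args: Array[String]): Unit = {
    println(dateOffset(-7, Calendar.DATE, new Date()))  // one week ago
    println(dateOffset(3, Calendar.MONTH, new Date()))  // three months ahead
  }
}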
Example 39
Source File: Output.scala From Clustering4Ever with Apache License 2.0 | 5 votes |
package org.clustering4ever.spark.clustering.mtm import java.io._ import org.apache.spark.rdd.RDD import org.apache.spark.mllib.linalg.DenseVector import scala.sys.process._ import java.util.Calendar import java.text.SimpleDateFormat import java.io.File import java.io.FileWriter object Output extends Serializable { def saveStr(savingPath: String, value: String, fileName: String = "") = { s"mkdir -p ${savingPath}".! val finalPath = savingPath + fileName val fw = new FileWriter(finalPath, true) fw.write(value + "\n") fw.close() } def write(outputDir: String, datas: RDD[Array[Double]], model: AbstractModel, nbRowSOM:Int, nbColSOM: Int): String = { val now = Calendar.getInstance().getTime() val format = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss") val time = format.format(now) val dim = datas.first.size val datasWithIndex = datas.zipWithIndex.map(_.swap) val path: String = outputDir + "/EXP-" + time + "/" s"mkdir -p ${path}".! val mapMin = Array.fill[Byte](dim)(0).mkString(",") var header = "# mapDim=2 mapSize={"+ nbRowSOM +"," + nbColSOM + "}" header += " pointDim=" + dim + " pointRealDim=" + dim + " mapMin={" + mapMin + "}" val prototypes = model.prototypes.map( d => (d.id, d.point)).sortBy(_._1).map(_._2) println("Write Prototypes...") val protosString = prototypes.map( d => d.toArray.mkString(",")).mkString("\n") // Use fileWriter saveStr(path, header + "\n" + protosString, "maps") val sumAffectedDatas = datas.map( d => (model.findClosestPrototype(d).id, 1)).reduceByKey{ case (sum1, sum2) => sum1 + sum2 }.collectAsMap // fill in all the prototypes that have 0 observations val card = (0 until prototypes.length).map( d => if (sumAffectedDatas.contains(d)) sumAffectedDatas(d) + "" else "0" ) println("Write Cardinalities...") var cardHeader = "# mapDim=2 mapSize={"+ nbRowSOM +"," + nbColSOM + "}" cardHeader += "pointDim=1 pointRealDim=0 mapMin={0} mapMax={0}" val cardStr = card.mkString("\n") saveStr(path, cardHeader + "\n" + cardStr, "cards") val affHeader = "# mapDim=1 mapSize={" + datas.count() + "} pointDim=1 pointRealDim=0 mapMin={0} mapMax={0}" val aff = datasWithIndex.map(d => (d._1, model.findClosestPrototype(d._2).id + "")).sortByKey().values.collect.mkString("\n") println("Write Affiliate...") saveStr(path, affHeader + "\n" + aff, "affs") println("Write Maps...") val maps = prototypes.zip(card).map(d => d._1.toArray.mkString(",") + "," + d._2).mkString("\n") saveStr(path, maps, "mapscard") println("Write successfully...") time } }
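The writer above names each run's output directory with a Calendar-derived timestamp. A minimal sketch of producing such a filesystem-safe timestamp (the base path is hypothetical):

import java.text.SimpleDateFormat
import java.util.Calendar

object RunTimestampDemo {
  def main(args: Array[String]): Unit = {
    val now = Calendar.getInstance().getTime
    val time = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss").format(now)
    println(s"/tmp/output/EXP-$time/") // e.g. /tmp/output/EXP-2024-01-01-12-30-00/
  }
}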
Example 40
Source File: EnvVarSpec.scala From skuber with Apache License 2.0 | 5 votes |
package skuber.json import org.specs2.mutable.Specification // for unit-style testing import org.specs2.execute.Result import org.specs2.execute.Failure import org.specs2.execute.Success import scala.math.BigInt import java.util.Calendar import skuber.EnvVar import format._ import play.api.libs.json._ class EnvVarSpec extends Specification { "This is a unit specification for the skuber formatter for env vars.\n ".txt // EnvVar reader and writer "An EnvVar can be read from json\n" >> { "this can be done for an env var with a field ref with a field path" >> { val env1 = Json.fromJson[EnvVar](Json.parse( """ |{ | "name": "PODNAME", | "valueFrom" : { | "fieldRef": { | "fieldPath": "metadata.name" | } | } |} """.stripMargin)).get val env2 = EnvVar("PODNAME", EnvVar.FieldRef("metadata.name")) env1 mustEqual env2 } } }
Example 41
Source File: Warn.scala From spatial with MIT License | 5 votes |
package emul import java.text.SimpleDateFormat import java.util.Calendar import java.io.PrintStream object Warn { val now = Calendar.getInstance().getTime val fmt = new SimpleDateFormat("dd_MM_yyyy_hh_mm_aa") val timestamp = fmt.format(now) var warns: Int = 0 lazy val log = new PrintStream(timestamp + ".log") def apply(x: => String): Unit = { log.println(x) warns += 1 } def close(): Unit = { if (warns > 0) { println(Warn.warns + " warnings occurred during program execution. See " + Warn.timestamp + ".log for details") log.close() } } }
Example 42
Source File: L5-7MultipleSocketStreams.scala From prosparkstreaming with Apache License 2.0 | 5 votes |
package org.apress.prospark import org.apache.spark.SparkContext import org.apache.spark.SparkConf import org.apache.spark.streaming.{ Seconds, StreamingContext } import org.apache.spark.streaming.dstream.PairDStreamFunctions import java.util.Calendar object TripByYearMultiApp { def main(args: Array[String]) { if (args.length != 4) { System.err.println( "Usage: TripByYearMultiApp <appname> <hostname> <base_port> <num_of_sockets>") System.exit(1) } val Seq(appName, hostname, basePort, nSockets) = args.toSeq val conf = new SparkConf() .setAppName(appName) .setJars(SparkContext.jarOfClass(this.getClass).toSeq) val ssc = new StreamingContext(conf, Seconds(10)) val streams = (0 to nSockets.toInt - 1).map(i => ssc.socketTextStream(hostname, basePort.toInt + i)) val uniStream = ssc.union(streams) uniStream .map(rec => rec.split(",")) .map(rec => (rec(13), rec(0).toInt)) .reduceByKey(_ + _) .map(pair => (pair._2, normalizeYear(pair._1))) .transform(rec => rec.sortByKey(ascending = false)) .saveAsTextFiles("TripByYear") ssc.start() ssc.awaitTermination() } def normalizeYear(s: String): String = { try { (Calendar.getInstance().get(Calendar.YEAR) - s.toInt).toString } catch { case e: Exception => s } } }
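normalizeYear above subtracts a year field from the current year obtained via Calendar, falling back to the raw string when it is not numeric. A minimal sketch of that helper on its own:

import java.util.Calendar

object NormalizeYearDemo {
  def normalizeYear(s: String): String =
    try {
      (Calendar.getInstance().get(Calendar.YEAR) - s.toInt).toString
    } catch {
      case _: NumberFormatException => s // leave non-numeric values untouched
    }

  def main(args: Array[String]): Unit = {
    println(normalizeYear("2008")) // age in years relative to the current year
    println(normalizeYear("n/a"))  // passed through unchanged
  }
}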
Example 43
Source File: SuggestionController.scala From Aton with GNU General Public License v3.0 | 5 votes |
package controllers import java.sql.Timestamp import java.util.Calendar import com.google.inject.Inject import model.form.SuggestionForm import model.{Role, Suggestion} import play.api.Environment import play.api.i18n.MessagesApi import services.{SuggestionService, UserService, state} import views.html._ import scala.concurrent.{ExecutionContext, Future} class SuggestionController @Inject()(suggestionService: SuggestionService, val messagesApi: MessagesApi)(implicit userService: UserService, executionContext: ExecutionContext, environment: Environment) extends ControllerWithNoAuthRequired { def home = AsyncStack { implicit request => implicit val (username: Option[String], isAdmin: Boolean) = loggedIn match { case Some(user) => (Some(user.username), user.role == Role.Administrator) case _ => (None, false) } if (isAdmin) { suggestionService.listAll.map { suggestions => Ok//(index(messagesApi("suggestion"), suggestionHome(SuggestionForm.form, suggestions))) } } else { Future.successful(Ok)//(index(messagesApi("suggestion"), suggestionHome(SuggestionForm.form, Seq.empty[Suggestion])))) } } def add = AsyncStack() { implicit request => implicit val (username: Option[String], isAdmin: Boolean) = loggedIn match { case Some(user) => (Some(user.username), user.role == Role.Administrator) case _ => (None, false) } SuggestionForm.form.bindFromRequest().fold( errorForm => Future.successful(Ok(errorForm.toString)), data => { val text = data.suggestion val suggestion = Suggestion(0, text, now, username) suggestionService.add(suggestion).map { case state.ActionCompleted => Redirect(routes.SuggestionController.home()) case _ => BadRequest } } ) } private def now = new Timestamp(Calendar.getInstance().getTime.getTime) }
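The controller above stamps each suggestion with the current time as a java.sql.Timestamp built from a Calendar. A minimal sketch of that helper:

import java.sql.Timestamp
import java.util.Calendar

object NowTimestampDemo {
  def now: Timestamp = new Timestamp(Calendar.getInstance().getTime.getTime)

  def main(args: Array[String]): Unit =
    println(now) // e.g. 2024-01-01 12:34:56.789
}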
Example 44
Source File: ExcelOutputWriter.scala From spark-hadoopoffice-ds with Apache License 2.0 | 5 votes |
package org.zuinnote.spark.office.excel import java.math.BigDecimal import java.sql.Date import java.sql.Timestamp import java.text.DateFormat import java.text.SimpleDateFormat import java.util.Calendar import org.apache.hadoop.conf.Configuration import org.apache.hadoop.io.NullWritable import org.apache.hadoop.io.ArrayWritable import org.apache.hadoop.mapreduce.RecordWriter import org.apache.hadoop.mapreduce.TaskAttemptContext import org.apache.hadoop.fs.Path import org.apache.spark.sql.catalyst.{ CatalystTypeConverters, InternalRow } import org.apache.spark.sql.Row import org.apache.spark.sql.execution.datasources.OutputWriter import org.apache.spark.sql.types._ import org.zuinnote.hadoop.office.format.common.dao.SpreadSheetCellDAO import org.zuinnote.hadoop.office.format.common.HadoopOfficeWriteConfiguration import org.zuinnote.hadoop.office.format.common.util.msexcel.MSExcelUtil import org.zuinnote.hadoop.office.format.mapreduce._ import org.apache.commons.logging.LogFactory import org.apache.commons.logging.Log import org.zuinnote.hadoop.office.format.common.HadoopOfficeWriteConfiguration import java.util.Locale import java.text.DecimalFormat import org.zuinnote.hadoop.office.format.common.converter.ExcelConverterSimpleSpreadSheetCellDAO import java.text.NumberFormat // NOTE: This class is instantiated and used on executor side only, no need to be serializable. private[excel] class ExcelOutputWriter( path: String, dataSchema: StructType, context: TaskAttemptContext, options: Map[String, String]) extends OutputWriter { def write(row: Row): Unit = { // check useHeader if (useHeader) { val headers = row.schema.fieldNames var i = 0 for (x <- headers) { val headerColumnSCD = new SpreadSheetCellDAO(x, "", "", MSExcelUtil.getCellAddressA1Format(currentRowNum, i), defaultSheetName) recordWriter.write(NullWritable.get(), headerColumnSCD) i += 1 } currentRowNum += 1 useHeader = false } // for each value in the row if (row.size>0) { var currentColumnNum = 0; val simpleObject = new Array[AnyRef](row.size) for (i <- 0 to row.size - 1) { // for each element of the row val obj = row.get(i) if ((obj.isInstanceOf[Seq[String]]) && (obj.asInstanceOf[Seq[String]].length==5)) { val formattedValue = obj.asInstanceOf[Seq[String]](0) val comment = obj.asInstanceOf[Seq[String]](1) val formula = obj.asInstanceOf[Seq[String]](2) val address = obj.asInstanceOf[Seq[String]](3) val sheetName = obj.asInstanceOf[Seq[String]](4) simpleObject(i) = new SpreadSheetCellDAO(formattedValue,comment,formula,address,sheetName) } else { simpleObject(i)=obj.asInstanceOf[AnyRef] } } // convert row to spreadsheetcellDAO val spreadSheetCellDAORow = simpleConverter.getSpreadSheetCellDAOfromSimpleDataType(simpleObject, defaultSheetName, currentRowNum) // write it for (x<- spreadSheetCellDAORow) { recordWriter.write(NullWritable.get(), x) } } currentRowNum += 1 } override def close(): Unit = { recordWriter.close(context) currentRowNum = 0; } }
Example 45
Source File: AppendAction.scala From gatling-imap with GNU Affero General Public License v3.0 | 5 votes |
package com.linagora.gatling.imap.action import java.util.Calendar import akka.actor.Props import com.linagora.gatling.imap.check.ImapCheck import com.linagora.gatling.imap.protocol.{Command, UserId} import io.gatling.commons.validation.Validation import io.gatling.core.action.ValidatedActionActor import io.gatling.core.session._ import scala.collection.immutable.Seq object AppendAction { def props(imapContext: ImapActionContext, requestname: String, checks: Seq[ImapCheck], mailbox: Expression[String], flags: Expression[Option[Seq[String]]], date: Expression[Option[Calendar]], content: Expression[String]) = Props(new AppendAction(imapContext, requestname, checks, mailbox, flags, date, content)) } class AppendAction(val imapContext: ImapActionContext, val requestName: String, override val checks: Seq[ImapCheck], mailbox: Expression[String], flags: Expression[Option[Seq[String]]], date: Expression[Option[Calendar]], content: Expression[String]) extends ValidatedActionActor with ImapActionActor { override protected def executeOrFail(session: Session): Validation[_] = { for { mailbox <- mailbox(session) flags <- flags(session) date <- date(session) content <- content(session) } yield { val id: Long = session.userId val handler = handleResponse(session, imapContext.clock.nowMillis) sessions.tell(Command.Append(UserId(id), mailbox, flags, date, content), handler) } } }
Example 46
Source File: ImapSimpleScenario.scala From gatling-imap with GNU Affero General Public License v3.0 | 5 votes |
package com.linagora.gatling.imap.scenario import java.util.Calendar import com.linagora.gatling.imap.PreDef._ import com.linagora.gatling.imap.protocol.command.FetchAttributes.AttributeList import com.linagora.gatling.imap.protocol.command.MessageRange._ import com.linagora.gatling.imap.protocol.command.{MessageRanges, Silent, StoreFlags} import io.gatling.core.Predef._ import io.gatling.core.feeder.FeederBuilder import io.gatling.core.scenario.Simulation import io.gatling.core.structure.ScenarioBuilder import scala.collection.immutable.Seq import scala.concurrent.duration._ object ImapSimpleScenario extends Simulation { private val receiveEmail = exec(imap("append").append("INBOX", Some(Seq("\\Flagged")), Option.empty[Calendar], """From: [email protected] |To: [email protected] |Subject: test subject | |Test content |abcdefghijklmnopqrstuvwxyz |0123456789""".stripMargin).check(ok)) def apply(feeder: FeederBuilder): ScenarioBuilder = scenario("Imap") .feed(feeder) .pause(1 second) .exec(imap("Connect").connect()).exitHereIfFailed .exec(imap("login").login("${username}", "${password}").check(ok)) .during(1 minute) { exec(imap("list").list("", "*").check(ok, hasFolder("INBOX"))) .pause(200 milli) .exec(imap("select").select("INBOX").check(ok)) .pause(200 milli) .exec(receiveEmail) .pause(200 milli) .exec(imap("fetch").fetch(MessageRanges(Last()), AttributeList("BODY[HEADER]", "UID", "BODY[TEXT]")).check(ok)) .pause(200 milli) .exec(imap("store").store(MessageRanges(Last()), StoreFlags.add(Silent.Disable(), "\\Deleted")).check(ok)) .pause(200 milli) .exec(imap("expunge").expunge().check(ok)) .pause(200 milli) .exec(imap("fetch").fetch(MessageRanges(Last()), AttributeList("BODY[HEADER]", "UID", "BODY[TEXT]")).check(no)) } }
Example 47
Source File: ImapUIDFetchScenario.scala From gatling-imap with GNU Affero General Public License v3.0 | 5 votes |
package com.linagora.gatling.imap.scenario import java.util.Calendar import com.linagora.gatling.imap.PreDef._ import com.linagora.gatling.imap.protocol.command.FetchAttributes.AttributeList import com.linagora.gatling.imap.protocol.command.MessageRange._ import com.linagora.gatling.imap.protocol.command.MessageRanges import io.gatling.core.Predef._ import io.gatling.core.feeder.FeederBuilder import io.gatling.core.structure.ScenarioBuilder import scala.collection.immutable.Seq import scala.concurrent.duration._ object ImapUIDFetchScenario { val numberOfMailInInbox = 1000 private val appendGracePeriod = 5 milliseconds private val populateMailbox = exec(imap("append").append("INBOX", Option.empty[Seq[String]], Option.empty[Calendar], """From: [email protected] |To: [email protected] |Subject: test subject | |Test content |abcdefghijklmnopqrstuvwxyz |0123456789""".stripMargin).check(ok)) private val populateInbox = repeat(numberOfMailInInbox)(pause(appendGracePeriod).exec(populateMailbox)) private val uidFetch = exec(imap("uidFetch").uidFetch(MessageRanges(Range(1, numberOfMailInInbox)), AttributeList("UID")).check(ok)) def apply(feeder: FeederBuilder): ScenarioBuilder = scenario("Imap") .feed(feeder) .exec(imap("Connect").connect()).exitHereIfFailed .exec(imap("login").login("${username}", "${password}").check(ok)) .exec(imap("select").select("INBOX").check(ok)) .exec(populateInbox) .pause(1 second) .exec(repeat(100)(uidFetch)) }
Example 48
Source File: ImapExpungeScenario.scala From gatling-imap with GNU Affero General Public License v3.0 | 5 votes |
package com.linagora.gatling.imap.scenario import java.util.Calendar import com.linagora.gatling.imap.PreDef._ import com.typesafe.scalalogging.LazyLogging import io.gatling.core.Predef._ import io.gatling.core.feeder.FeederBuilder import io.gatling.core.scenario.Simulation import io.gatling.core.session.{Expression, _} import io.gatling.core.structure.ScenarioBuilder import scala.collection.immutable.Seq import scala.concurrent.duration._ import scala.util.Random object ImapExpungeScenario extends Simulation with LazyLogging { private val numberOfMailInInbox = Integer.getInteger("numberOfMailInInbox", 1000).intValue() private val percentageOfMailToExpunge = Integer.getInteger("percentageOfMailToExpunge", 20).toFloat private val maxDurationInMinutes = Integer.getInteger("maxDuration", 15).toFloat minutes logger.trace(s"numberOfMailInInbox $numberOfMailInInbox") logger.trace(s"percentageOfMailToExpunge $percentageOfMailToExpunge") private def getRandomDeleted(): Boolean = Random.nextFloat() < (percentageOfMailToExpunge/ 100.0) def flagsWithRandomDeletion: Expression[Session] = (session: Session) => { session.set("flags", if (getRandomDeleted()) Some(Seq("\\Flagged", "\\Deleted")) else Some(Seq("\\Flagged")) ) } private val populateMailbox = exec(imap("append").append("INBOX", "${flags}", Option.empty[Calendar], """From: [email protected] |To: [email protected] |Subject: test subject | |Test content |abcdefghijklmnopqrstuvwxyz |0123456789""".stripMargin).check(ok)) private val populateInbox = repeat(numberOfMailInInbox)(exec(flagsWithRandomDeletion).pause(5 millisecond).exec(populateMailbox)) def apply(feeder: FeederBuilder): ScenarioBuilder = scenario("Imap") .feed(feeder) .pause(1 second) .exec(imap("Connect").connect()).exitHereIfFailed .exec(imap("login").login("${username}", "${password}").check(ok)) .exec(imap("select").select("INBOX").check(ok)) .exec(populateInbox) .exec(imap("expunge").expunge().check(ok)) }
Example 49
Source File: kBCDriver.scala From spark-betweenness with Apache License 2.0 | 5 votes |
package com.centrality.kBC import java.util.Calendar import org.apache.spark.SparkConf import org.apache.spark.SparkContext import org.apache.spark.graphx.GraphLoader import org.apache.spark.graphx.PartitionStrategy object kBCDriver { def main(args: Array[String]) { // Create spark context val appName="kBCDriver" val conf = new SparkConf().setAppName(appName)//.setMaster(master) val sc = new SparkContext(conf) // Graph partition params val DEFAULT_K = 2 val DEFAULT_EDGE_PARTITIONS=60 val DEFAULT_CANONICAL_ORIENTATION=true val k = args(0).toInt println("k : " + k) val canonicalOrientation = DEFAULT_CANONICAL_ORIENTATION val numEdgePartitions = args(1).toInt // Input params val DEFAULT_INPUT_DIR="/tmp/input/" val DEFAULT_INPUT_FILE_NAME="edge_list.txt" val inputDir = args(2) val inputFileName = args(4) val inputPath = inputDir+inputFileName println("inputPath : " + inputPath) // Output params val DEFAULT_OUTPUT_DIR="/tmp/output/" val DEFAULT_V_OUTPUT_FILE=List(inputFileName,"kbc",k,"vertices").mkString("_")+".txt" val DEFAULT_E_OUTPUT_FILE=List(inputFileName,"kbc",k,"edges").mkString("_")+".txt" val outputDir = args(3) val outputVerticesFileName = sc.hadoopConfiguration.get("outputVerticesFileName", DEFAULT_V_OUTPUT_FILE) val outputEdgesFileName = sc.hadoopConfiguration.get("outputEdgesFileName", DEFAULT_E_OUTPUT_FILE) val outputVerticesPath = sc.hadoopConfiguration.get("outputVerticesPath", outputDir+outputVerticesFileName) val outputEdgesPath = sc.hadoopConfiguration.get("outputEdgesPath", outputDir+outputEdgesFileName) println("outputVerticesPath : " + outputVerticesPath) println("outputEdgesPath : " + outputEdgesPath) // Read graph val graph = GraphLoader.edgeListFile(sc, inputPath, canonicalOrientation, numEdgePartitions).partitionBy(PartitionStrategy.EdgePartition2D) println(Calendar.getInstance().getTime().toString + " vertices : " + graph.vertices.count()) println(Calendar.getInstance().getTime().toString + " edges : " + graph.edges.count()) // Run kBC println(Calendar.getInstance().getTime().toString + ": start kBC") val kBCGraph = KBetweenness.run(graph, k) // Save graph to file println(Calendar.getInstance().getTime().toString + ": saving results ") kBCGraph.vertices.coalesce(1).saveAsTextFile(outputVerticesPath) kBCGraph.edges.coalesce(1).saveAsTextFile(outputEdgesPath) } }
Example 50
Source File: Utils.scala From DataXServer with Apache License 2.0 | 5 votes |
package org.tianlangstudio.data.hamal.yarn.util import java.net.BindException import java.util.Calendar import org.apache.hadoop.yarn.api.records.{ApplicationAttemptId, ApplicationId, ContainerId, NodeId} import org.tianlangstudio.data.hamal.common.exp.DataHamalException import org.tianlangstudio.data.hamal.core.HamalConf private[hamal] object Utils { def isBindCollision(exception: Throwable): Boolean = { exception match { case e: BindException => if (e.getMessage != null) { return true } isBindCollision(e.getCause) case e: Exception => isBindCollision(e.getCause) case _ => false } } def containerIdNodeId2ExecutorId(containerId:ContainerId,nodeId:NodeId): String = { val appAttId= containerId.getApplicationAttemptId val applicationId = appAttId.getApplicationId val appClusterTs = applicationId.getClusterTimestamp val appId = applicationId.getId val attId = appAttId.getAttemptId val conId = containerId.getContainerId val nodeHost = nodeId.getHost val nodePort = nodeId.getPort s"$appClusterTs:$appId:$attId:$conId:$nodeHost:$nodePort" } def executorId2ContainerIdNodeId(executorId:String) = { executorId.split(":") match { case Array(appClusterTs,appId,attId,conId,nodeHost,nodePort) => val appAttId = ApplicationAttemptId.newInstance(ApplicationId.newInstance(appClusterTs.toLong,appId.toInt),attId.toInt) val containerId = ContainerId.newContainerId(appAttId,conId.toInt) val nodeId = NodeId.newInstance(nodeHost,nodePort.toInt); Some(containerId,nodeId) case _ => None } } private val taskIdLock = new Object private val preId = ""; def genTaskId():String = { val now = Calendar.getInstance val hour = now.get(Calendar.HOUR_OF_DAY) val min = now.get(Calendar.MINUTE) val seconds = now.get(Calendar.SECOND) val ms = now.get(Calendar.MILLISECOND) val id = (hour * 3600 * 1000 + min * 60 * 1000 + seconds * 1000 + ms) + "" taskIdLock.synchronized( if(id.equals(preId)) { genTaskId() }else { id } ) } }
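genTaskId above derives an identifier from the millisecond-of-day by reading the hour, minute, second and millisecond fields of a Calendar. A minimal sketch of that field arithmetic:

import java.util.Calendar

object MillisOfDayDemo {
  def main(args: Array[String]): Unit = {
    val now = Calendar.getInstance
    val millisOfDay =
      now.get(Calendar.HOUR_OF_DAY) * 3600L * 1000 +
      now.get(Calendar.MINUTE) * 60L * 1000 +
      now.get(Calendar.SECOND) * 1000L +
      now.get(Calendar.MILLISECOND)
    println(millisOfDay) // milliseconds elapsed since local midnight
  }
}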
Example 51
Source File: L5-6SocketStream.scala From prosparkstreaming with Apache License 2.0 | 5 votes |
package org.apress.prospark import org.apache.spark.SparkContext import org.apache.spark.SparkConf import org.apache.spark.streaming.{ Seconds, StreamingContext } import org.apache.spark.streaming.dstream.PairDStreamFunctions import java.util.Calendar object TripByYearApp { def main(args: Array[String]) { if (args.length != 3) { System.err.println( "Usage: TripByYearApp <appname> <hostname> <port>") System.exit(1) } val Seq(appName, hostname, port) = args.toSeq val conf = new SparkConf() .setAppName(appName) .setJars(SparkContext.jarOfClass(this.getClass).toSeq) val ssc = new StreamingContext(conf, Seconds(10)) ssc.socketTextStream(hostname, port.toInt) .map(rec => rec.split(",")) .map(rec => (rec(13), rec(0).toInt)) .reduceByKey(_ + _) .map(pair => (pair._2, normalizeYear(pair._1))) .transform(rec => rec.sortByKey(ascending = false)) .saveAsTextFiles("TripByYear") ssc.start() ssc.awaitTermination() } def normalizeYear(s: String): String = { try { (Calendar.getInstance().get(Calendar.YEAR) - s.toInt).toString } catch { case e: Exception => s } } }
Example 52
Source File: SqlDataMapper.scala From spark-riak-connector with Apache License 2.0 | 5 votes |
package com.basho.riak.spark.writer.mapper import java.util.{Calendar, Date} import com.basho.riak.client.core.query.timeseries.ColumnDescription.ColumnType import com.basho.riak.client.core.query.timeseries.{Cell, ColumnDescription, Row => RiakRow} import com.basho.riak.spark.rdd.BucketDef import com.basho.riak.spark.writer.ts.RowDef import com.basho.riak.spark.writer.{WriteDataMapper, WriteDataMapperFactory} import org.apache.spark.sql.types._ import org.apache.spark.sql.{Row => SparkRow} import com.basho.riak.spark.util.TSConversionUtil._ class SqlDataMapper[T <: SparkRow] extends WriteDataMapper[T, RowDef] { override def mapValue(row: T): RowDef = { Option(row.schema) match { case None => RowDef(createRowByType(row), None) case Some(schema) => val (riakRow, columnsDef) = createRiakRowBySchema(row, schema) RowDef(riakRow, columnsDef) } } } object SqlDataMapper { def factory[T <: SparkRow]: WriteDataMapperFactory[T, RowDef] = new WriteDataMapperFactory[T, RowDef] { override def dataMapper(bucket: BucketDef): WriteDataMapper[T, RowDef] = new SqlDataMapper[T] } }
Example 53
Source File: SimpleScalaRiakTSExample.scala From spark-riak-connector with Apache License 2.0 | 5 votes |
package com.basho.riak.spark.examples import org.apache.spark.SparkConf import org.apache.spark.SparkContext import com.basho.riak.client.core.query.timeseries.Cell import com.basho.riak.client.core.query.timeseries.Row import com.basho.riak.client.core.util.BinaryValue import com.basho.riak.spark.rdd.RiakFunctions import com.basho.riak.spark.toSparkContextFunctions import java.util.Calendar import com.basho.riak.spark.rdd.RiakObjectData import com.basho.riak.client.core.operations.ts.StoreOperation import scala.collection.JavaConversions._ import com.basho.riak.client.core.query.Namespace import com.basho.riak.spark.util.RiakObjectConversionUtil import com.basho.riak.client.core.query.indexes.LongIntIndex , Cell.newTimestamp(x.time), new Cell(x.temperature), new Cell(x.humidity), new Cell(x.pressure) ) ) val storeOp = new StoreOperation.Builder(tableName).withRows(rows).build rf.withRiakDo(session => { val r = session.getRiakCluster.execute(storeOp).get assert(true) }) //Non-ts //noinspection ScalaStyle val ros = testData.map{ x => val obj = RiakObjectConversionUtil.to(x) obj.setContentType("application/json") obj.getIndexes.getIndex[LongIntIndex, LongIntIndex.Name](LongIntIndex.named("time")) .add(x.time) obj } rf.withRiakDo(session => { ros.foreach(ro => rf.createValueRaw(session, ns, ro, null, true)) }) } private def clearBucket(sparkConf: SparkConf): Unit = { val rf = RiakFunctions(sparkConf) rf.withRiakDo(session => { rf.resetAndEmptyBucket(ns) }) } private def setSparkOpt(sparkConf: SparkConf, option: String, defaultOptVal: String): SparkConf = { val optval = sparkConf.getOption(option).getOrElse(defaultOptVal) sparkConf.set(option, optval) } // Since quantum is equal to 1h, it begins at xx:00:00.000 private def beginingOfQuantumMillis(time: Long): Long = { val cal = Calendar.getInstance cal.setTimeInMillis(time) cal.set(Calendar.MINUTE, 0) cal.set(Calendar.SECOND, 0) cal.set(Calendar.MILLISECOND, 0) cal.getTimeInMillis } // Since quantum is equal to 1h, it ends at xx:59:59.999 private def endOfQuantumMillis(time: Long): Long = { val cal = Calendar.getInstance cal.setTimeInMillis(time) cal.set(Calendar.MINUTE, 59) cal.set(Calendar.SECOND, 59) cal.set(Calendar.MILLISECOND, 999) cal.getTimeInMillis } }
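The example above aligns timestamps to one-hour quanta by zeroing the minute, second and millisecond fields of a Calendar. A minimal sketch of the lower bound (the upper bound sets 59/59/999 instead):

import java.util.Calendar

object QuantumBoundsDemo {
  // Start of the hour containing `time`, in epoch milliseconds
  def beginningOfQuantumMillis(time: Long): Long = {
    val cal = Calendar.getInstance
    cal.setTimeInMillis(time)
    cal.set(Calendar.MINUTE, 0)
    cal.set(Calendar.SECOND, 0)
    cal.set(Calendar.MILLISECOND, 0)
    cal.getTimeInMillis
  }

  def main(args: Array[String]): Unit =
    println(beginningOfQuantumMillis(System.currentTimeMillis()))
}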
Example 54
Source File: InvitesDAOSpec.scala From crm-seed with Apache License 2.0 | 5 votes |
package com.dataengi.crm.identities.daos import java.util.{Calendar, UUID} import com.dataengi.crm.common.context.types._ import com.dataengi.crm.common.extensions.awaits._ import com.dataengi.crm.identities.context.AuthenticationContext import com.dataengi.crm.identities.models._ import org.specs2.runner.SpecificationsFinder import play.api.test.PlaySpecification class InvitesDAOSpec extends PlaySpecification with AuthenticationContext { sequential lazy val invitesDAO = application.injector.instanceOf[InvitesDAO] lazy val rolesDAO = application.injector.instanceOf[RolesDAO] lazy val TestRole = Role("TestRole", Seq(Permission(Actions.UsersManagement, PermissionStates.Allow))) var TestInvite = Invite("[email protected]", TestRole, Calendar.getInstance.getTimeInMillis, Calendar.getInstance.getTimeInMillis, InviteStatuses.Waiting, UUID.randomUUID(), Calendar.getInstance.getTimeInMillis) "InivitesDAO" should { "add invite and get by id" in { val addTestRoleResult = rolesDAO.add(TestRole).await() addTestRoleResult.isRight == true TestInvite = TestInvite.copy(role = TestRole.copy(id = Some(addTestRoleResult.value))) val addInviteResult = invitesDAO.add(TestInvite).await() addInviteResult.isRight === true val id = addInviteResult.value val allResult = invitesDAO.all.await() allResult.isRight === true val getByIdInvite = invitesDAO.get(id).await() getByIdInvite.isRight === true getByIdInvite.value.email === TestInvite.copy(id = Some(id)).email } "find exist company by email" in { val getByEmailResult = invitesDAO.find(TestInvite.email).await() getByEmailResult.isRight === true } "add and remove" in { val invite = TestInvite.copy(email = "[email protected]") val addInviteResult = invitesDAO.add(invite).await() addInviteResult.isRight === true val id = addInviteResult.value val getByIdInvite = invitesDAO.get(id).await() getByIdInvite.isRight === true getByIdInvite.value.email === invite.copy(id = Some(id)).email val removeInviteResult = invitesDAO.delete(id).await() removeInviteResult.isRight === true } "update company" in { val invite = TestInvite.copy(email = "[email protected]") val inviteId = invitesDAO.add(invite).await().value val updateForInvite = TestInvite.copy(email = "[email protected]").copy(id = Some(inviteId)) val updateInviteResult = invitesDAO.update(updateForInvite).await() updateInviteResult.isRight === true val getByIdInvite = invitesDAO.get(inviteId).await() getByIdInvite.isRight === true getByIdInvite.value.email === updateForInvite.email } } }
Example 55
Source File: GlobalVarsAndMethods.scala From Azure-Databricks-NYC-Taxi-Workshop with MIT License | 5 votes |
// Databricks notebook source //Database credentials & details - for use with Spark scala for writing // Secrets val jdbcUsername = dbutils.secrets.get(scope = "gws-sql-db", key = "username") val jdbcPassword = dbutils.secrets.get(scope = "gws-sql-db", key = "password") // JDBC driver class & connection properties val driverClass = "com.microsoft.sqlserver.jdbc.SQLServerDriver" val jdbcHostname = "gws-server.database.windows.net" val jdbcPort = 1433 val jdbcDatabase = "gws_sql_db" // JDBC URI val jdbcUrl = s"jdbc:sqlserver://${jdbcHostname}:${jdbcPort};database=${jdbcDatabase}" // Properties() object to hold the parameters import java.util.Properties val connectionProperties = new Properties() connectionProperties.put("user", s"${jdbcUsername}") connectionProperties.put("password", s"${jdbcPassword}") connectionProperties.setProperty("Driver", driverClass) // COMMAND ---------- def generateBatchID(): Int = { var batchId: Int = 0 var pushdown_query = "(select count(*) as record_count from BATCH_JOB_HISTORY) table_record_count" val df = spark.read.jdbc(url=jdbcUrl, table=pushdown_query, properties=connectionProperties) val recordCount = df.first().getInt(0) println("Record count=" + recordCount) if(recordCount == 0) batchId=1 else { pushdown_query = "(select max(batch_id) as current_batch_id from BATCH_JOB_HISTORY) current_batch_id" val df = spark.read.jdbc(url=jdbcUrl, table=pushdown_query, properties=connectionProperties) batchId = df.first().getInt(0) + 1 } batchId } // COMMAND ---------- import java.sql._ import java.util.Calendar def insertBatchMetadata(batchID: Int, processID: Int, activityName: String, activityStatus: String): Unit = { var conn: Connection = null var stmt: Statement = null val insertSql = """ |insert into batch_job_history (batch_id,batch_step_id,batch_step_description,batch_step_status,batch_step_time) |values (?,?,?,?,?) """.stripMargin try { Class.forName(driverClass) conn = DriverManager.getConnection(jdbcUrl, jdbcUsername, jdbcPassword) val preparedStmt: PreparedStatement = conn.prepareStatement(insertSql) preparedStmt.setInt(1, batchID) preparedStmt.setInt(2, processID) preparedStmt.setString(3, activityName) preparedStmt.setString(4, activityStatus) preparedStmt.setString(5, Calendar.getInstance().getTime().toString) preparedStmt.execute // cleanup preparedStmt.close conn.close } catch { case se: SQLException => se.printStackTrace case e: Exception => e.printStackTrace } finally { try { if (stmt!=null) stmt.close } catch { case se2: SQLException => // nothing we can do } try { if (conn!=null) conn.close } catch { case se: SQLException => se.printStackTrace } //end finally-try } //end try }
Example 56
Source File: 00-common.scala From Azure-Databricks-NYC-Taxi-Workshop with MIT License | 5 votes |
// Databricks notebook source //JDBC connectivity related val driverClass = "com.microsoft.sqlserver.jdbc.SQLServerDriver" val jdbcUsername = dbutils.secrets.get(scope = "gws-sql-db", key = "username") val jdbcPassword = dbutils.secrets.get(scope = "gws-sql-db", key = "password") val jdbcPort = 1433 val jdbcDatabase = "gws_sql_db" //Replace with your server name val jdbcHostname = "gws-server.database.windows.net" // Create the JDBC URL without passing in the user and password parameters. val jdbcUrl = s"jdbc:sqlserver://${jdbcHostname}:${jdbcPort};database=${jdbcDatabase}" // Create a Properties() object to hold the parameters. import java.util.Properties val connectionProperties = new Properties() connectionProperties.put("user", s"${jdbcUsername}") connectionProperties.put("password", s"${jdbcPassword}") connectionProperties.setProperty("Driver", driverClass) // COMMAND ---------- def generateBatchID(): Int = { var batchId: Int = 0 var pushdown_query = "(select count(*) as record_count from BATCH_JOB_HISTORY) table_record_count" val df = spark.read.jdbc(url=jdbcUrl, table=pushdown_query, properties=connectionProperties) val recordCount = df.first().getInt(0) println("Record count=" + recordCount) if(recordCount == 0) batchId=1 else { pushdown_query = "(select max(batch_id) as current_batch_id from BATCH_JOB_HISTORY) current_batch_id" val df = spark.read.jdbc(url=jdbcUrl, table=pushdown_query, properties=connectionProperties) batchId = df.first().getInt(0) + 1 } batchId } // COMMAND ---------- import java.sql._ import java.util.Calendar //Function to insert ETL batch metadata into RDBMS def insertBatchMetadata(batchID: Int, processID: Int, activityName: String, activityStatus: String): Unit = { var conn: Connection = null var stmt: Statement = null val insertSql = """ |insert into batch_job_history (batch_id,batch_step_id,batch_step_description,batch_step_status,batch_step_time) |values (?,?,?,?,?) """.stripMargin try { Class.forName(driverClass) conn = DriverManager.getConnection(jdbcUrl, jdbcUsername, jdbcPassword) val preparedStmt: PreparedStatement = conn.prepareStatement(insertSql) preparedStmt.setInt(1, batchID) preparedStmt.setInt(2, processID) preparedStmt.setString(3, activityName) preparedStmt.setString(4, activityStatus) preparedStmt.setString(5, Calendar.getInstance().getTime().toString) preparedStmt.execute // cleanup preparedStmt.close conn.close } catch { case se: SQLException => se.printStackTrace case e: Exception => e.printStackTrace } finally { try { if (stmt!=null) stmt.close } catch { case se2: SQLException => // nothing we can do } try { if (conn!=null) conn.close } catch { case se: SQLException => se.printStackTrace } //end finally-try } //end try }
Example 57
Source File: FutureValidatorForOptionSpec.scala From bean-validation-scala with MIT License | 5 votes |
package com.tsukaby.bean_validation_scala import java.util.{Calendar, Date, Locale} import javax.validation.constraints.Future import org.joda.time.DateTime import scala.annotation.meta.field class FutureValidatorForOptionSpec extends BaseSpec { private[this] case class TestBeanWithOptionCalendar( @(Future@field) value: Option[Calendar] ) private[this] case class TestBeanWithOptionDate( @(Future@field) value: Option[Date] ) private[this] case class TestBeanWithOptionDateTime( @(Future@field) value: Option[DateTime] ) val yesterday = DateTime.now().minusDays(1) val tomorrow = DateTime.now().plusDays(1) Seq( (TestBeanWithOptionCalendar(Some(yesterday.toCalendar(Locale.getDefault))), 1), (TestBeanWithOptionCalendar(Some(tomorrow.toCalendar(Locale.getDefault))), 0), (TestBeanWithOptionDate(Some(yesterday.toDate)), 1), (TestBeanWithOptionDate(Some(tomorrow.toDate)), 0), (TestBeanWithOptionDateTime(Some(yesterday)), 1), (TestBeanWithOptionDateTime(Some(tomorrow)), 0) ) foreach { case (bean, expected) => s"Check violations count. bean = $bean, count = $expected" >> { test(bean, expected) } } }
Example 58
Source File: PastValidatorForOptionSpec.scala From bean-validation-scala with MIT License | 5 votes |
package com.tsukaby.bean_validation_scala import java.util.{Locale, Date, Calendar} import javax.validation.constraints.Past import org.joda.time.DateTime import scala.annotation.meta.field class PastValidatorForOptionSpec extends BaseSpec { private[this] case class TestBeanWithOptionCalendar( @(Past@field) value: Option[Calendar] ) private[this] case class TestBeanWithOptionDate( @(Past@field) value: Option[Date] ) private[this] case class TestBeanWithOptionDateTime( @(Past@field) value: Option[DateTime] ) val tomorrow = DateTime.now().plusDays(1) val yesterday = DateTime.now().minusDays(1) Seq( (TestBeanWithOptionCalendar(Some(tomorrow.toCalendar(Locale.getDefault))), 1), (TestBeanWithOptionCalendar(Some(yesterday.toCalendar(Locale.getDefault))), 0), (TestBeanWithOptionDate(Some(tomorrow.toDate)), 1), (TestBeanWithOptionDate(Some(yesterday.toDate)), 0), (TestBeanWithOptionDateTime(Some(tomorrow)), 1), (TestBeanWithOptionDateTime(Some(yesterday)), 0) ) foreach { case (bean, expected) => s"Check violations count. bean = $bean, count = $expected" >> { test(bean, expected) } } }
Example 59
Source File: FutureValidatorForOption.scala From bean-validation-scala with MIT License | 5 votes |
package com.tsukaby.bean_validation_scala import java.util.{Calendar, Date} import javax.validation.constraints.Future import javax.validation.{ConstraintValidator, ConstraintValidatorContext} import org.hibernate.validator.internal.constraintvalidators.bv.future.{FutureValidatorForReadablePartial, FutureValidatorForReadableInstant, FutureValidatorForDate, FutureValidatorForCalendar} import org.joda.time.{ReadableInstant, ReadablePartial} class FutureValidatorForOption extends ConstraintValidator[Future, Option[_]] { private var constraintAnnotation: Future = null override def initialize(constraintAnnotation: Future): Unit = { this.constraintAnnotation = constraintAnnotation } override def isValid(value: Option[_], context: ConstraintValidatorContext): Boolean = { value match { case Some(x: Calendar) => val v = new FutureValidatorForCalendar v.initialize(constraintAnnotation) v.isValid(x, context) case Some(x: Date) => val v = new FutureValidatorForDate v.initialize(constraintAnnotation) v.isValid(x, context) case Some(x: ReadableInstant) => val v = new FutureValidatorForReadableInstant v.initialize(constraintAnnotation) v.isValid(x, context) case Some(x: ReadablePartial) => val v = new FutureValidatorForReadablePartial v.initialize(constraintAnnotation) v.isValid(x, context) case None => true case Some(_) => throw new IllegalStateException("oops.") } } }
Example 60
Source File: UserContext.scala From grpc-scala-microservice-kit with Apache License 2.0 | 5 votes |
package mu.node.echod.models import java.security.{PrivateKey, PublicKey} import java.util.Calendar import mu.node.echod.util.KeyUtils import pdi.jwt.Jwt import play.api.libs.json.Json import scala.util.Try case class UserContext(userId: String) extends KeyUtils { def toJwt(expiryMillis: Long, jwtSigningKey: PrivateKey): String = { val json = s"""{ | "sub": "$userId", | "exp": $expiryMillis |} |""".stripMargin Jwt.encode(json, jwtSigningKey, jwtDsa) } } object UserContext extends KeyUtils { def fromJwt(jwt: String, jwtVerificationKey: PublicKey): Option[UserContext] = { Jwt .decode(jwt, jwtVerificationKey, Seq(jwtDsa)) .flatMap(payload => Try(Json.parse(payload))) .toOption .filter(json => (json \ "exp").asOpt[Long].exists(notExpired)) .flatMap(json => (json \ "sub").asOpt[String].map(UserContext(_))) } private def notExpired(expiryMillis: Long): Boolean = expiryMillis > Calendar.getInstance().getTimeInMillis }
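The token check above accepts a JWT only while its exp claim lies after the current Calendar time in milliseconds. A minimal sketch of that comparison (the claim values are hypothetical):

import java.util.Calendar

object ExpiryCheckDemo {
  def notExpired(expiryMillis: Long): Boolean =
    expiryMillis > Calendar.getInstance().getTimeInMillis

  def main(args: Array[String]): Unit = {
    val oneHourFromNow = Calendar.getInstance().getTimeInMillis + 3600L * 1000
    println(notExpired(oneHourFromNow)) // true
    println(notExpired(0L))             // false
  }
}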
Example 61
Source File: PastValidatorForOption.scala From bean-validation-scala with MIT License | 5 votes |
package com.tsukaby.bean_validation_scala import java.util.{Calendar, Date} import javax.validation.constraints.Past import javax.validation.{ConstraintValidator, ConstraintValidatorContext} import org.hibernate.validator.internal.constraintvalidators.bv.past.{PastValidatorForReadablePartial, PastValidatorForReadableInstant, PastValidatorForDate, PastValidatorForCalendar} import org.joda.time.{ReadableInstant, ReadablePartial} class PastValidatorForOption extends ConstraintValidator[Past, Option[_]] { private var constraintAnnotation: Past = null override def initialize(constraintAnnotation: Past): Unit = { this.constraintAnnotation = constraintAnnotation } override def isValid(value: Option[_], context: ConstraintValidatorContext): Boolean = { value match { case Some(x: Calendar) => val v = new PastValidatorForCalendar v.initialize(constraintAnnotation) v.isValid(x, context) case Some(x: Date) => val v = new PastValidatorForDate v.initialize(constraintAnnotation) v.isValid(x, context) case Some(x: ReadableInstant) => val v = new PastValidatorForReadableInstant v.initialize(constraintAnnotation) v.isValid(x, context) case Some(x: ReadablePartial) => val v = new PastValidatorForReadablePartial v.initialize(constraintAnnotation) v.isValid(x, context) case None => true case Some(_) => throw new IllegalStateException("oops.") } } }
Example 62
Source File: StatsSender.scala From CM-Well with Apache License 2.0 | 5 votes |
package cmwell.stats import java.net.{DatagramPacket, DatagramSocket, InetAddress} import java.util.Calendar import java.text.SimpleDateFormat import akka.actor.{Actor, ActorSystem, Props} import akka.actor.Actor.Receive case class Message(msg: String, host: String, port: Int) class SenderActor extends Actor { private val dsocket = new DatagramSocket() sys.addShutdownHook { dsocket.close() } override def receive: Receive = { case Message(msg, host, port) => val address = InetAddress.getByName(host) val packet = new DatagramPacket(msg.getBytes(), msg.length, address, port) dsocket.send(packet) } } class StatsSender(path: String, host: String = "localhost", port: Int = 8125) { object Sender { val system = ActorSystem("mySystem") val actor = system.actorOf(Props[SenderActor], "SenderActor") def send(message: String) { actor ! Message(message, host, port) } } private def getCurrentTimeStr: String = { val now = Calendar.getInstance().getTime() val dateFormat = new SimpleDateFormat("ddMMyyyy_hhmm") dateFormat.format(now) } private def getMachineName: String = { java.net.InetAddress.getLocalHost().getHostName().split('.')(0) } private def getName(p: String, action: String): String = { p.replace("{MachineName}", getMachineName).replace("{DateTime}", getCurrentTimeStr) + "." + action .replace(".", "-") .replace(" ", "_") } def sendCounts(action: String, num: Int) { val message = getName(path, action) + ":" + num + "|c" Sender.send(message) } def sendTimings(action: String, num: Int) { val message = getName(path, action) + ":" + num + "|ms" Sender.send(message) } def sendGauges(action: String, num: Int) { val message = getName(path, action) + ":" + num + "|g" Sender.send(message) } def sendSets(action: String) { val message = getName(path, action) + "|s" Sender.send(message) } }
Example 63
Source File: httpserverplugin_staticfile.scala From scalabpe with Apache License 2.0 | 5 votes |
package scalabpe.plugin.http import java.io.File import java.net.URLEncoder import java.text.SimpleDateFormat import java.util.Calendar import java.util.GregorianCalendar import java.util.Locale import java.util.TimeZone import scala.collection.mutable.HashMap import org.jboss.netty.handler.codec.http.HttpHeaders import scalabpe.core.HashMapStringAny class StaticFilePlugin extends HttpServerPlugin with HttpServerStaticFilePlugin { val ETAG_TAG = "etag" val EXPIRE_TAG = "expire" val ATTACHMENT = "attachment" val FILENAME = "filename" val HTTP_DATE_FORMAT = "EEE, dd MMM yyyy HH:mm:ss zzz"; val HTTP_DATE_GMT_TIMEZONE = "GMT"; val df_tl = new ThreadLocal[SimpleDateFormat]() { override def initialValue(): SimpleDateFormat = { val df = new SimpleDateFormat(HTTP_DATE_FORMAT, Locale.US) df.setTimeZone(TimeZone.getTimeZone(HTTP_DATE_GMT_TIMEZONE)); df } } def generateStaticFile(serviceId: Int, msgId: Int, errorCode: Int, errorMessage: String, body: HashMapStringAny, pluginParam: String, headers: HashMap[String, String]): String = { if (body.ns(FILENAME) == "") { return null } val filename = body.ns(FILENAME) if (!new File(filename).exists()) { return null } if (body.ns(ETAG_TAG) != "") { headers.put("ETag", body.ns(ETAG_TAG)) } if (body.ns(EXPIRE_TAG) != "") { body.i(EXPIRE_TAG) match { case 0 | -1 => headers.put(HttpHeaders.Names.CACHE_CONTROL, "no-cache") case n => // seconds val time = new GregorianCalendar(); time.add(Calendar.SECOND, n); headers.put(HttpHeaders.Names.EXPIRES, df_tl.get.format(time.getTime())); headers.put(HttpHeaders.Names.CACHE_CONTROL, "max-age=" + n); } } val ext = parseExt(filename) if (ext != "") body.put("__file_ext__", ext) if (body.ns(ATTACHMENT, "1") == "1") { val filename = body.ns(FILENAME) val v = "attachment; filename=\"%s\"".format(URLEncoder.encode(parseFilename(filename), "UTF-8")) headers.put("Content-Disposition", v) } filename } def parseFilename(name: String): String = { val p = name.lastIndexOf("/") if (p < 0) return name name.substring(p + 1) } def parseExt(name: String): String = { val p = name.lastIndexOf(".") if (p < 0) return "" name.substring(p + 1).toLowerCase() } }
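The plugin above renders the HTTP Expires header by adding a number of seconds to a GregorianCalendar and formatting the result with a GMT-pinned RFC 1123 style pattern. A minimal sketch:

import java.text.SimpleDateFormat
import java.util.{Calendar, GregorianCalendar, Locale, TimeZone}

object ExpiresHeaderDemo {
  def main(args: Array[String]): Unit = {
    val df = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz", Locale.US)
    df.setTimeZone(TimeZone.getTimeZone("GMT"))
    val time = new GregorianCalendar()
    time.add(Calendar.SECOND, 3600) // cache for one hour
    println("Expires: " + df.format(time.getTime))
  }
}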
Example 64
Source File: spark_algo.scala From mllib_subpackage with Apache License 2.0 | 5 votes |
import org.apache.commons.cli.{Options, PosixParser} import org.apache.spark.SparkContext import org.apache.spark.SparkConf //import org.apache.hadoop.fs import java.util.Date import java.util.Calendar import org.apache.hadoop.fs.FileSystem //import sun.management.FileSystem object spark_algo { def main(args: Array[String]) { // Input Params val parser = new PosixParser( ) val options = new Options( ) options.addOption("a", "algo", true, "algo type; 10. sgd 11. lbfgs") val cl = parser.parse( options, args, true ) val algo = cl.getOptionValue("algo") val conf = new SparkConf() val sc = new SparkContext(conf) sc.getConf.getAll.foreach(println) val configuration = sc.hadoopConfiguration configuration.setBoolean("mapreduce.output.fileoutputformat.compress", false) val fs = FileSystem.get(configuration) val modeltmp = if(algo=="10" || algo=="11" || algo=="12" || algo=="13") { new mllib_lr(sc, fs, args) } else if(algo=="21") { new ftrl(sc, fs, args) } else if(algo=="22") { new ftrl_batch(sc, fs, args) } else if(algo=="31") { new relative(sc, fs, args) } else if(algo=="40") { new mllib_gbdt(sc, fs, args) } else if(algo=="41") { new lambda_mart(sc, fs, args) } else if(algo=="91") { new feature_analyse(sc, fs, args) } else if(algo=="docs_words_analyse") { new docs_words_analyse(sc, fs, args) } val model = modeltmp.asInstanceOf[malgo] model.deal() } }
Example 65
Source File: QueryExecutorWithLogging.scala From variantsdwh with Apache License 2.0 | 5 votes |
package pl.edu.pw.ii.zsibio.dwh.benchmark.utils

import java.io.{File, FileOutputStream, PrintWriter}
import java.util.Calendar

import pl.edu.pw.ii.zsibio.dwh.benchmark.dao.{ConnectDriver, EngineConnection, QueryResult}
import net.jcazevedo.moultingyaml._
import net.jcazevedo.moultingyaml.DefaultYamlProtocol
import net.jcazevedo.moultingyaml.DefaultYamlProtocol._
import org.apache.log4j.Logger
import pl.edu.pw.ii.zsibio.dwh.benchmark.dao.ConnectDriver.Value
import pl.edu.pw.ii.zsibio.dwh.benchmark.utils.QueryType.QueryType

case class Query(queryId: String, queryType: String, queryEngine: String, storageFormat: String,
                 queryDesc: String, statement: String)

object QueryType extends Enumeration {
  type QueryType = Value
  val SELECT, CREATE, UPDATE = Value
}

object QueryExecutorWithLogging {
  val log = Logger.getLogger("pl.edu.pw.ii.zsibio.dwh.benchmark.utils.QueryExecutorWithLogging")

  object QueryYamlProtocol extends DefaultYamlProtocol {
    implicit val queryFormat = yamlFormat6(Query)
  }

  def runStatement(query: Query, conn: EngineConnection, logFile: String, dryRun: Boolean) = {
    log.info(s"Running ${query.queryId} ... using ${query.queryEngine} engine")
    log.debug(s"Executing query: ${query.statement}")
    query.queryType.toLowerCase() match {
      case "select" => logQuery(conn, query, logFile, dryRun)
      case _ => conn.executeUpdate(query.statement.toLowerCase)
    }
  }

  def parseQueryYAML(file: String, storageType: String, connString: String, kuduMaster: String,
                     dbName: String, ifExplain: Boolean = false): Query = {
    log.info(s"Parsing ${file}")
    val lines = scala.io.Source.fromFile(file).mkString
    val yml = lines.stripMargin.parseYaml
    import QueryYamlProtocol._
    queryPreprocess(yml.convertTo[Query], storageType, connString, kuduMaster, dbName, ifExplain)
  }

  private def logQuery(conn: EngineConnection, query: Query, logFile: String, dryRun: Boolean) = {
    val rs = conn.executeQuery(query.statement.toLowerCase, true)
    //rs.rs.next()
    val result = s"${Calendar.getInstance().getTime().toString},${query.queryId}," +
      s"${query.queryEngine},${query.storageFormat},${rs.timing.get.getTiming()},${dryRun.toString}\n"
    log.info(s"Result: ${result}")
    val writer = new PrintWriter(new FileOutputStream(new File(logFile), true))
    writer.write(result)
    writer.flush()
    writer.close()
  }

  private def queryPreprocess(query: Query, storageType: String, connString: String, kuduMaster: String,
                              dbName: String, ifExplain: Boolean) = {
    def replaceVars(property: String) = {
      property
        .replaceAll("\\{\\{DATA_FORMAT\\}\\}", storageType.toLowerCase)
        .replaceAll("\\{\\{DB_NAME\\}\\}", dbName.toLowerCase)
        .replaceAll("\\{\\{KUDU_MASTER\\}\\}", kuduMaster)
        .replaceAll("\\{\\{IF_EXPLAIN\\}\\}", if (ifExplain) "EXPLAIN " else "")
        .replaceAll("\\{\\{PERCENTILE_APPROX\\}\\}",
          if (query.queryEngine.toLowerCase == "presto") "approx_percentile" else "percentile_approx")
    }

    query.copy(
      queryId = replaceVars(query.queryId),
      queryDesc = replaceVars(query.queryDesc),
      storageFormat = replaceVars(query.storageFormat),
      statement = replaceVars(query.statement.replaceAll(",", ",\n").replaceAll("\\(", "\\( "))
    )
  }
}
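Calendar only appears here as a human-readable timestamp for the benchmark log line. A minimal sketch of that logging pattern on its own (the file name and fields are illustrative, not from the project):

// Sketch: timestamp each result row with Calendar.getInstance().getTime()
// and append it to a CSV file opened in append mode.
import java.io.{File, FileOutputStream, PrintWriter}
import java.util.Calendar

object QueryLogSketch {
  def appendResult(logFile: String, queryId: String, timingMillis: Long): Unit = {
    val row = s"${Calendar.getInstance().getTime.toString},$queryId,$timingMillis\n"
    val writer = new PrintWriter(new FileOutputStream(new File(logFile), true)) // true = append
    try writer.write(row) finally writer.close()
  }

  def main(args: Array[String]): Unit =
    appendResult("query_log.csv", "q1", 1234L)
}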
Example 66
Source File: PlainText.scala From eidos with Apache License 2.0 | 5 votes |
package org.clulab.wm.eidos.utils.meta

import java.text.SimpleDateFormat
import java.util.Calendar
import java.util.TimeZone

import org.clulab.wm.eidos.utils.EidosException
import org.clulab.timenorm.scate.SimpleInterval
import org.clulab.wm.eidos.context.DCT
import org.clulab.wm.eidos.document.Metadata

class PlainText(text: String,
    titleOpt: Option[String] = None,
    idOpt: Option[String] = None,
    dateOpt: Option[String] = None,
    locationOpt: Option[String] = None) extends EidosText {

  protected val metadata = {
    val dctOpt: Option[DCT] = {
      dateOpt.map { date =>
        val calendar = try {
          val parsed = PlainText.dateFormat.parse(date)
          val calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"))
          calendar.setTime(parsed)
          calendar
        }
        catch {
          case throwable: Throwable =>
            throw new EidosException(s"""Could not decipher "${date}" as a date""", throwable)
        }

        val simpleInterval = SimpleInterval.of(calendar.get(Calendar.YEAR),
          calendar.get(Calendar.MONTH) + 1, calendar.get(Calendar.DAY_OF_MONTH))
        DCT(simpleInterval, date)
      }
    }
    new Metadata(dctOpt, idOpt, titleOpt, locationOpt)
  }

  def getText: String = text

  def getMetadata: Metadata = metadata
}

object PlainText {
  protected val dateFormat: SimpleDateFormat = {
    val dateFormat = new SimpleDateFormat("yyyy-MM-dd")
    val timeZone = TimeZone.getTimeZone("UTC")
    dateFormat.setTimeZone(timeZone)
    dateFormat
  }
}
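The date handling here parses a "yyyy-MM-dd" string in UTC and then reads year, month, and day back through a UTC Calendar; the +1 on Calendar.MONTH is needed because Calendar months are zero-based. A standalone sketch of just that conversion (names are illustrative):

// Sketch: parse "yyyy-MM-dd" in UTC and read the fields via a UTC Calendar.
import java.text.SimpleDateFormat
import java.util.{Calendar, TimeZone}

object DctSketch {
  private val dateFormat = {
    val df = new SimpleDateFormat("yyyy-MM-dd")
    df.setTimeZone(TimeZone.getTimeZone("UTC"))
    df
  }

  def yearMonthDay(date: String): (Int, Int, Int) = {
    val calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"))
    calendar.setTime(dateFormat.parse(date))
    (calendar.get(Calendar.YEAR),
     calendar.get(Calendar.MONTH) + 1, // Calendar.MONTH is zero-based
     calendar.get(Calendar.DAY_OF_MONTH))
  }

  def main(args: Array[String]): Unit =
    println(yearMonthDay("2020-02-29")) // (2020,2,29)
}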
Example 67
Source File: UserContextSpec.scala From grpc-scala-microservice-kit with Apache License 2.0 | 5 votes |
package mu.node.echod

import java.util.Calendar

import mu.node.echod.models.UserContext
import mu.node.echod.util.KeyUtils
import pdi.jwt.Jwt

import scala.concurrent.duration._

class UserContextSpec extends BaseSpec with KeyUtils {

  val jwtSigningKey = loadPkcs8PrivateKey(
    pathForTestResourcePath(config.getString("jwt.signing-key")))

  "The UserContext companion object" when {
    val userId = "8d5921be-8f85-11e6-ae22-56b6b6499611"
    val futureExpiry = Calendar.getInstance().getTimeInMillis + Duration(5, MINUTES).toMillis
    val validClaim =
      s"""|{
          | "sub": "$userId",
          | "exp": $futureExpiry
          |}""".stripMargin

    "asked to create a UserContext from a valid, signed JWT" should {
      "return the UserContext" in {
        val validJwt = Jwt.encode(validClaim, jwtSigningKey, jwtDsa)
        UserContext.fromJwt(validJwt, jwtVerificationKey) shouldEqual Some(UserContext(userId))
      }
    }

    "asked to create UserContext from an unsigned JWT" should {
      "return None" in {
        val unsignedJwt = Jwt.encode(validClaim)
        UserContext.fromJwt(unsignedJwt, jwtVerificationKey) shouldEqual None
      }
    }

    "asked to create UserContext from a JWT with an invalid claim" should {
      "return None" in {
        val invalidClaim = s"""{ "unknownField": "value" }"""
        val invalidJwt = Jwt.encode(invalidClaim, jwtSigningKey, jwtDsa)
        UserContext.fromJwt(invalidJwt, jwtVerificationKey) shouldEqual None
      }
    }

    "asked to create UserContext from a JWT with an invalid payload" should {
      "return None" in {
        val invalidPayload = "malformed JSON"
        val invalidJwt = Jwt.encode(invalidPayload, jwtSigningKey, jwtDsa)
        UserContext.fromJwt(invalidJwt, jwtVerificationKey) shouldEqual None
      }
    }
  }
}
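The only Calendar usage in this spec is building the expiry value for the "exp" claim: the current time in milliseconds plus a Duration. A minimal sketch of that arithmetic on its own (the object name is illustrative):

// Sketch: compute a future expiry timestamp the way the spec above does.
import java.util.Calendar
import scala.concurrent.duration._

object ExpirySketch {
  def expiryInMillis(ttl: FiniteDuration): Long =
    Calendar.getInstance().getTimeInMillis + ttl.toMillis

  def main(args: Array[String]): Unit =
    println(expiryInMillis(Duration(5, MINUTES))) // now + 5 minutes, as epoch millis
}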
Example 68
Source File: EchoServerSpec.scala From grpc-scala-microservice-kit with Apache License 2.0 | 5 votes |
package mu.node.echod

import java.util.Calendar

import mu.node.echo.SendMessageRequest
import grpc.{AccessTokenCallCredentials, EchoClient}
import mu.node.echod.models.UserContext
import org.scalatest.BeforeAndAfterAll

import scala.concurrent.duration._

class EchoServerSpec extends BaseSpec with BeforeAndAfterAll {

  val jwtSigningKey = loadPkcs8PrivateKey(
    pathForTestResourcePath(config.getString("jwt.signing-key")))

  val echoServiceStub = EchoClient.buildServiceStub(config, fileForTestResourcePath)

  override def beforeAll(): Unit = {
    echoServer.start()
  }

  override def afterAll(): Unit = {
    echoServer.shutdown()
  }

  "The echod gRPC server" when {

    "sent a valid, authenticated SendMessageRequest" should {
      "reply back with the Message" in {
        val userId = "8d5921be-8f85-11e6-ae22-56b6b6499611"
        val futureExpiry = Calendar.getInstance().getTimeInMillis + Duration(5, MINUTES).toMillis
        val jwt = UserContext(userId).toJwt(futureExpiry, jwtSigningKey)

        val sendMessage = echoServiceStub
          .withCallCredentials(new AccessTokenCallCredentials(jwt))
          .send(SendMessageRequest("hello"))

        whenReady(sendMessage) { reply =>
          reply.messageId.nonEmpty shouldBe true
          reply.senderId shouldEqual userId
          reply.content shouldEqual "hello"
        }
      }
    }

    "sent an unauthenticated SendMessageRequest" should {
      "return an exception indicating that the call was unauthenticated" in {
        val sendMessage = echoServiceStub.send(SendMessageRequest("test"))

        whenReady(sendMessage.failed) { ex =>
          ex shouldBe a[Exception]
          ex.getMessage shouldEqual "UNAUTHENTICATED"
        }
      }
    }

    "sent a SendMessageRequest with an expired access token" should {
      "return an exception indicating that the call was unauthenticated" in {
        val userId = "8d5921be-8f85-11e6-ae22-56b6b6499611"
        val lapsedExpiry = Calendar.getInstance().getTimeInMillis - Duration(5, MINUTES).toMillis
        val jwt = UserContext(userId).toJwt(lapsedExpiry, jwtSigningKey)

        val sendMessage = echoServiceStub
          .withCallCredentials(new AccessTokenCallCredentials(jwt))
          .send(SendMessageRequest("hello"))

        whenReady(sendMessage.failed) { ex =>
          ex shouldBe a[Exception]
          ex.getMessage shouldEqual "UNAUTHENTICATED"
        }
      }
    }

    "sent a SendMessageRequest with an invalid access token" should {
      "return an exception indicating that the call was unauthenticated" in {
        val sendMessage = echoServiceStub
          .withCallCredentials(new AccessTokenCallCredentials("bad jwt"))
          .send(SendMessageRequest("hello"))

        whenReady(sendMessage.failed) { ex =>
          ex shouldBe a[Exception]
          ex.getMessage shouldEqual "UNAUTHENTICATED"
        }
      }
    }
  }
}
Example 69
Source File: DateUtils.scala From iolap with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.catalyst.util

import java.sql.Date
import java.text.SimpleDateFormat
import java.util.{Calendar, TimeZone}

import org.apache.spark.sql.catalyst.expressions.Cast

object DateUtils {
  private val MILLIS_PER_DAY = 86400000

  // Java TimeZone has no mention of thread safety. Use thread local instance to be safe.
  private val LOCAL_TIMEZONE = new ThreadLocal[TimeZone] {
    override protected def initialValue: TimeZone = {
      Calendar.getInstance.getTimeZone
    }
  }

  private def javaDateToDays(d: Date): Int = {
    millisToDays(d.getTime)
  }

  // we should use the exact day as Int, for example, (year, month, day) -> day
  def millisToDays(millisLocal: Long): Int = {
    ((millisLocal + LOCAL_TIMEZONE.get().getOffset(millisLocal)) / MILLIS_PER_DAY).toInt
  }

  private def toMillisSinceEpoch(days: Int): Long = {
    val millisUtc = days.toLong * MILLIS_PER_DAY
    millisUtc - LOCAL_TIMEZONE.get().getOffset(millisUtc)
  }

  def fromJavaDate(date: java.sql.Date): Int = {
    javaDateToDays(date)
  }

  def toJavaDate(daysSinceEpoch: Int): java.sql.Date = {
    new java.sql.Date(toMillisSinceEpoch(daysSinceEpoch))
  }

  def toString(days: Int): String = Cast.threadLocalDateFormat.get.format(toJavaDate(days))

  def stringToTime(s: String): java.util.Date = {
    if (!s.contains('T')) {
      // JDBC escape string
      if (s.contains(' ')) {
        java.sql.Timestamp.valueOf(s)
      } else {
        java.sql.Date.valueOf(s)
      }
    } else if (s.endsWith("Z")) {
      // this is zero timezone of ISO8601
      stringToTime(s.substring(0, s.length - 1) + "GMT-00:00")
    } else if (s.indexOf("GMT") == -1) {
      // timezone with ISO8601
      val inset = "+00.00".length
      val s0 = s.substring(0, s.length - inset)
      val s1 = s.substring(s.length - inset, s.length)
      if (s0.substring(s0.lastIndexOf(':')).contains('.')) {
        stringToTime(s0 + "GMT" + s1)
      } else {
        stringToTime(s0 + ".0GMT" + s1)
      }
    } else {
      // ISO8601 with GMT
      val ISO8601GMT: SimpleDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSz")
      ISO8601GMT.parse(s)
    }
  }
}
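Here Calendar.getInstance.getTimeZone supplies the JVM's default time zone, which millisToDays and toMillisSinceEpoch use to shift between local wall-clock millis and a day count since the epoch. A hypothetical round-trip, assuming the object above is visible from your code:

// Hypothetical usage: reduce a java.sql.Date to a day count and restore it.
import java.sql.Date
import org.apache.spark.sql.catalyst.util.DateUtils

object DateUtilsUsage {
  def main(args: Array[String]): Unit = {
    val today = new Date(System.currentTimeMillis())
    val days = DateUtils.fromJavaDate(today) // day count, adjusted by the local offset
    println(days)
    println(DateUtils.toJavaDate(days))      // back to a java.sql.Date at local midnight
  }
}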
Example 70
Source File: exercise06.scala From scala-for-the-Impatient with MIT License | 5 votes |
import java.util.Calendar

import scala.collection.mutable

def calendarDays(): Unit = {
  val daysMap = mutable.LinkedHashMap(
    "Monday" -> Calendar.MONDAY,
    "Tuesday" -> Calendar.TUESDAY,
    "Wednesday" -> Calendar.WEDNESDAY,
    "Thursday" -> Calendar.THURSDAY,
    "Friday" -> Calendar.FRIDAY,
    "Saturday" -> Calendar.SATURDAY,
    "Sunday" -> Calendar.SUNDAY
  )
  println(daysMap.mkString(","))
}
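Calendar's weekday constants are plain Int fields (Calendar.SUNDAY is 1 through Calendar.SATURDAY is 7), and LinkedHashMap preserves insertion order, so the function prints the day names paired with those numbers. Roughly:

calendarDays()
// prints something like:
// Monday -> 2,Tuesday -> 3,Wednesday -> 4,Thursday -> 5,Friday -> 6,Saturday -> 7,Sunday -> 1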
Example 71
Source File: JacksonMessageWriter.scala From sparkoscope with Apache License 2.0 | 5 votes |
package org.apache.spark.status.api.v1

import java.io.OutputStream
import java.lang.annotation.Annotation
import java.lang.reflect.Type
import java.nio.charset.StandardCharsets
import java.text.SimpleDateFormat
import java.util.{Calendar, Locale, SimpleTimeZone}
import javax.ws.rs.Produces
import javax.ws.rs.core.{MediaType, MultivaluedMap}
import javax.ws.rs.ext.{MessageBodyWriter, Provider}

import com.fasterxml.jackson.annotation.JsonInclude
import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature}

@Provider
@Produces(Array(MediaType.APPLICATION_JSON))
private[v1] class JacksonMessageWriter extends MessageBodyWriter[Object] {

  val mapper = new ObjectMapper() {
    override def writeValueAsString(t: Any): String = {
      super.writeValueAsString(t)
    }
  }
  mapper.registerModule(com.fasterxml.jackson.module.scala.DefaultScalaModule)
  mapper.enable(SerializationFeature.INDENT_OUTPUT)
  mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL)
  mapper.setDateFormat(JacksonMessageWriter.makeISODateFormat)

  override def isWriteable(
      aClass: Class[_],
      `type`: Type,
      annotations: Array[Annotation],
      mediaType: MediaType): Boolean = {
    true
  }

  override def writeTo(
      t: Object,
      aClass: Class[_],
      `type`: Type,
      annotations: Array[Annotation],
      mediaType: MediaType,
      multivaluedMap: MultivaluedMap[String, AnyRef],
      outputStream: OutputStream): Unit = {
    t match {
      case ErrorWrapper(err) => outputStream.write(err.getBytes(StandardCharsets.UTF_8))
      case _ => mapper.writeValue(outputStream, t)
    }
  }

  override def getSize(
      t: Object,
      aClass: Class[_],
      `type`: Type,
      annotations: Array[Annotation],
      mediaType: MediaType): Long = {
    -1L
  }
}

private[spark] object JacksonMessageWriter {
  def makeISODateFormat: SimpleDateFormat = {
    val iso8601 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'GMT'", Locale.US)
    val cal = Calendar.getInstance(new SimpleTimeZone(0, "GMT"))
    iso8601.setCalendar(cal)
    iso8601
  }
}
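This variant differs from the earlier JacksonMessageWriter examples only in passing Locale.US to the formatter; the Calendar pinned to a zero-offset SimpleTimeZone still makes every date render in GMT. A small illustrative check of what that formatter produces (not part of Spark):

// Illustrative only: the GMT-pinned formatter applied to the current instant.
import java.text.SimpleDateFormat
import java.util.{Calendar, Date, Locale, SimpleTimeZone}

object IsoFormatDemo {
  def main(args: Array[String]): Unit = {
    val iso8601 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'GMT'", Locale.US)
    iso8601.setCalendar(Calendar.getInstance(new SimpleTimeZone(0, "GMT")))
    println(iso8601.format(new Date())) // e.g. 2016-11-15T09:30:00.000GMT
  }
}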
Example 72
Source File: Util.scala From Machine-Learning-with-Spark-Second-Edition with MIT License | 5 votes |
package com.sparksample

// Imports added for the types referenced below; DoubleMatrix is assumed to come
// from jblas, which this codebase uses for vector math.
import java.text.SimpleDateFormat
import java.util.Calendar

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import org.jblas.DoubleMatrix

object Util {
  val PATH = "../.."
  val spConfig = (new SparkConf).setMaster("local").setAppName("SparkApp")
  var sc = new SparkContext(spConfig)

  def getMovieData(): RDD[String] = {
    val movie_data = sc.textFile(PATH + "/data/ml-100k/u.item")
    return movie_data
  }

  def getUserData(): RDD[String] = {
    val user_data = sc.textFile(PATH + "/data/ml-100k/u.data")
    return user_data
  }

  def getDate(): String = {
    val today = Calendar.getInstance().getTime()
    // create a date "formatter" (the date format we want)
    val formatter = new SimpleDateFormat("yyyy-MM-dd-hh.mm.ss")
    // create a new String using the date format we want
    val folderName = formatter.format(today)
    return folderName
  }

  def cosineSimilarity(vec1: DoubleMatrix, vec2: DoubleMatrix): Double = {
    vec1.dot(vec2) / (vec1.norm2() * vec2.norm2())
  }

  def avgPrecisionK(actual: Seq[Int], predicted: Seq[Int], k: Int): Double = {
    val predK = predicted.take(k)
    var score = 0.0
    var numHits = 0.0
    for ((p, i) <- predK.zipWithIndex) {
      if (actual.contains(p)) {
        numHits += 1.0
        score += numHits / (i.toDouble + 1.0)
      }
    }
    if (actual.isEmpty) {
      1.0
    } else {
      score / scala.math.min(actual.size, k).toDouble
    }
  }
}
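getDate is just a timestamped folder name; a standalone sketch of the same Calendar plus SimpleDateFormat pattern, independent of the SparkContext held by Util:

// Sketch of the getDate() pattern: format "now" as a folder-name-friendly string.
// Note that "hh" is the 12-hour clock; use "HH" if a 24-hour timestamp is wanted.
import java.text.SimpleDateFormat
import java.util.Calendar

object FolderNameSketch {
  def timestampFolder(): String = {
    val formatter = new SimpleDateFormat("yyyy-MM-dd-hh.mm.ss")
    formatter.format(Calendar.getInstance().getTime)
  }

  def main(args: Array[String]): Unit =
    println(timestampFolder()) // e.g. 2016-11-15-09.30.00
}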
Example 73
Source File: ScalaApp.scala From Machine-Learning-with-Spark-Second-Edition with MIT License | 5 votes |
import java.text.SimpleDateFormat
import java.util.Calendar

import org.apache.spark.SparkContext
import org.apache.spark.mllib.recommendation.{ALS, Rating}

// Excerpt: the enclosing object and main method are reconstructed here from the
// trailing braces of this listing; the earlier setup of `sc`, `PATH`, `ratings`
// and the trained ALS `model` is elided in the source listing.
object ScalaApp {

  def main(args: Array[String]) {
    // ... (SparkContext creation, data loading and ALS training elided) ...

    val predictedRating = model.predict(789, 123)
    println(predictedRating)

    val userId = 789
    val K = 10
    val topKRecs = model.recommendProducts(userId, K)
    println(topKRecs.mkString("\n"))

    val movies = sc.textFile(PATH + "/ml-100k/u.item")
    val titles = movies.map(line => line.split("\\|").take(2))
      .map(array => (array(0).toInt, array(1))).collectAsMap()
    titles(123)
    // res68: String = Frighteners, The (1996)

    val moviesForUser = ratings.keyBy(_.user).lookup(789)
    // moviesForUser: Seq[org.apache.spark.mllib.recommendation.Rating] = WrappedArray(Rating(789,1012,4.0), Rating(789,127,5.0), Rating(789,475,5.0), Rating(789,93,4.0), ...
    println(moviesForUser.size)

    moviesForUser.sortBy(-_.rating).take(10)
      .map(rating => (titles(rating.product), rating.rating)).foreach(println)
    topKRecs.map(rating => (titles(rating.product), rating.rating)).foreach(println)

    sc.stop()
    //bw.close()
  }

  class Util {
    def getDate(): String = {
      val today = Calendar.getInstance().getTime()
      // create a date "formatter" (the date format we want)
      val formatter = new SimpleDateFormat("yyyy-MM-dd-hh.mm.ss")
      // create a new String using the date format we want
      val folderName = formatter.format(today)
      return folderName
    }
  }
}
Example 74
Source File: BisectingKMeansPersist.scala From Machine-Learning-with-Spark-Second-Edition with MIT License | 5 votes |
package org.sparksamples.kmeans

import java.text.SimpleDateFormat
import java.util.Calendar

import org.apache.spark.SparkConf
import org.apache.spark.ml.clustering.BisectingKMeans
import org.apache.spark.sql.SparkSession

object BisectingKMeansPersist {
  val PATH = "/home/ubuntu/work/spark-2.0.0-bin-hadoop2.7/"
  val BASE = "./data/movie_lens_libsvm_2f"

  val time = System.currentTimeMillis()
  val formatter = new SimpleDateFormat("dd_MM_yyyy_hh_mm_ss")

  val calendar = Calendar.getInstance()
  calendar.setTimeInMillis(time)
  val date_time = formatter.format(calendar.getTime())

  def main(args: Array[String]): Unit = {
    val spConfig = (new SparkConf).setMaster("local[1]").setAppName("SparkApp").
      set("spark.driver.allowMultipleContexts", "true")

    val spark = SparkSession
      .builder()
      .appName("Spark SQL Example")
      .config(spConfig)
      .getOrCreate()

    val datasetUsers = spark.read.format("libsvm").load(
      BASE + "/movie_lens_2f_users_xy/part-00000")
    datasetUsers.show(3)

    val bKMeansUsers = new BisectingKMeans()
    bKMeansUsers.setMaxIter(10)
    bKMeansUsers.setMinDivisibleClusterSize(5)

    val modelUsers = bKMeansUsers.fit(datasetUsers)
    val predictedUserClusters = modelUsers.transform(datasetUsers)
    modelUsers.clusterCenters.foreach(println)

    val predictedDataSetUsers = modelUsers.transform(datasetUsers)
    val predictionsUsers = predictedDataSetUsers.select("prediction").rdd.map(x => x(0))
    predictionsUsers.saveAsTextFile(BASE + "/prediction/" + date_time + "/bkmeans_2f_users")

    val datasetItems = spark.read.format("libsvm").load(BASE + "/movie_lens_2f_items_xy/part-00000")
    datasetItems.show(3)

    val kmeansItems = new BisectingKMeans().setK(5).setSeed(1L)
    val modelItems = kmeansItems.fit(datasetItems)

    val predictedDataSetItems = modelItems.transform(datasetItems)
    val predictionsItems = predictedDataSetItems.select("prediction").rdd.map(x => x(0))
    predictionsItems.saveAsTextFile(BASE + "/prediction/" + date_time + "/bkmeans_2f_items")
    spark.stop()
  }
}
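The only Calendar-specific step here (and in the GMM example below) is turning System.currentTimeMillis() into a formatted run identifier via setTimeInMillis, which is then embedded in the prediction output paths. A minimal sketch of just that step:

// Sketch: convert epoch millis into a "dd_MM_yyyy_hh_mm_ss" run identifier.
import java.text.SimpleDateFormat
import java.util.Calendar

object RunIdSketch {
  def runId(millis: Long): String = {
    val formatter = new SimpleDateFormat("dd_MM_yyyy_hh_mm_ss")
    val calendar = Calendar.getInstance()
    calendar.setTimeInMillis(millis) // interpret the epoch millis in the default zone
    formatter.format(calendar.getTime)
  }

  def main(args: Array[String]): Unit =
    println(runId(System.currentTimeMillis())) // e.g. 15_11_2016_09_30_00
}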
Example 75
Source File: GMMClusteringPersist.scala From Machine-Learning-with-Spark-Second-Edition with MIT License | 5 votes |
package org.sparksamples.gmm

import java.text.SimpleDateFormat
import java.util.Calendar

import org.apache.spark.SparkConf
import org.apache.spark.ml.clustering.GaussianMixture
import org.apache.spark.sql.SparkSession

object GMMClusteringPersist {
  val PATH = "/home/ubuntu/work/spark-2.0.0-bin-hadoop2.7/"
  val BASE = "./data/movie_lens_libsvm_2f"

  val time = System.currentTimeMillis()
  val formatter = new SimpleDateFormat("dd_MM_yyyy_hh_mm_ss")

  val calendar = Calendar.getInstance()
  calendar.setTimeInMillis(time)
  val date_time = formatter.format(calendar.getTime())

  def main(args: Array[String]): Unit = {
    val spConfig = (new SparkConf).setMaster("local[1]").setAppName("SparkApp").
      set("spark.driver.allowMultipleContexts", "true")

    val spark = SparkSession
      .builder()
      .appName("Spark SQL Example")
      .config(spConfig)
      .getOrCreate()

    val datasetUsers = spark.read.format("libsvm").load(
      BASE + "/movie_lens_2f_users_libsvm/part-00000")
    datasetUsers.show(3)

    val gmmUsers = new GaussianMixture().setK(5).setSeed(1L)
    gmmUsers.setMaxIter(20)
    val modelUsers = gmmUsers.fit(datasetUsers)

    val predictedDataSetUsers = modelUsers.transform(datasetUsers)
    val predictionsUsers = predictedDataSetUsers.select("prediction").rdd.map(x => x(0))
    predictionsUsers.saveAsTextFile(BASE + "/prediction/" + date_time + "/gmm_2f_users")

    val dataSetItems = spark.read.format("libsvm").load(BASE + "/movie_lens_2f_items_libsvm/part-00000")

    val gmmItems = new GaussianMixture().setK(5).setSeed(1L)
    val modelItems = gmmItems.fit(dataSetItems)

    val predictedDataSetItems = modelItems.transform(dataSetItems)
    val predictionsItems = predictedDataSetItems.select("prediction").rdd.map(x => x(0))
    predictionsItems.saveAsTextFile(BASE + "/prediction/" + date_time + "/gmm_2f_items")
    spark.stop()
  }
}
Example 76
Source File: CustomTelemetryService.scala From finagle-prometheus with MIT License | 5 votes |
package com.samstarling.prometheusfinagle.examples

import java.text.SimpleDateFormat
import java.util.Calendar

import com.samstarling.prometheusfinagle.metrics.Telemetry
import com.twitter.finagle.Service
import com.twitter.finagle.http.{Request, Response, Status}
import com.twitter.util.Future

class CustomTelemetryService(telemetry: Telemetry)
    extends Service[Request, Response] {

  private val dayOfWeekFormat = new SimpleDateFormat("E")

  private val counter =
    telemetry.counter("requests_by_day_of_week", "Help text", Seq("day_of_week"))

  override def apply(request: Request): Future[Response] = {
    counter.labels(dayOfWeek).inc()
    val rep = Response(request.version, Status.Ok)
    rep.setContentString("Your request was logged!")
    Future(rep)
  }

  private def dayOfWeek: String = {
    dayOfWeekFormat.format(Calendar.getInstance.getTime)
  }
}
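The "E" pattern yields the short day name for the formatter's locale (for example "Tue"), and that string becomes the counter's day_of_week label value. A quick standalone check of that formatting step:

// Standalone check of the "E" pattern used for the day_of_week label.
import java.text.SimpleDateFormat
import java.util.{Calendar, Locale}

object DayOfWeekSketch {
  def main(args: Array[String]): Unit = {
    val fmt = new SimpleDateFormat("E", Locale.US) // locale pinned here for stable label values
    println(fmt.format(Calendar.getInstance.getTime)) // e.g. "Tue"
  }
}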
Example 77
Source File: PluginsFilesUtils.scala From sparta with Apache License 2.0 | 5 votes |
package com.stratio.sparta.serving.core.utils

import java.io.File
import java.net.URL
import java.util.{Calendar, UUID}

import akka.event.slf4j.SLF4JLogging
import com.stratio.sparta.serving.core.helpers.JarsHelper
import org.apache.commons.io.FileUtils

trait PluginsFilesUtils extends SLF4JLogging {

  def addPluginsToClassPath(pluginsFiles: Array[String]): Unit = {
    log.info(pluginsFiles.mkString(","))
    pluginsFiles.foreach(filePath => {
      log.info(s"Adding to classpath plugin file: $filePath")
      if (filePath.startsWith("/") || filePath.startsWith("file://")) addFromLocal(filePath)
      if (filePath.startsWith("hdfs")) addFromHdfs(filePath)
      if (filePath.startsWith("http")) addFromHttp(filePath)
    })
  }

  private def addFromLocal(filePath: String): Unit = {
    log.info(s"Getting file from local: $filePath")
    val file = new File(filePath.replace("file://", ""))
    JarsHelper.addToClasspath(file)
  }

  private def addFromHdfs(fileHdfsPath: String): Unit = {
    log.info(s"Getting file from HDFS: $fileHdfsPath")
    val inputStream = HdfsUtils().getFile(fileHdfsPath)
    val fileName = fileHdfsPath.split("/").last
    log.info(s"HDFS file name is $fileName")
    val file = new File(s"/tmp/sparta/userjars/${UUID.randomUUID().toString}/$fileName")
    log.info(s"Downloading HDFS file to local file system: ${file.getAbsoluteFile}")
    FileUtils.copyInputStreamToFile(inputStream, file)
    JarsHelper.addToClasspath(file)
  }

  private def addFromHttp(fileURI: String): Unit = {
    log.info(s"Getting file from HTTP: $fileURI")
    val tempFile = File.createTempFile(s"sparta-plugin-${Calendar.getInstance().getTimeInMillis}", ".jar")
    val url = new URL(fileURI)
    FileUtils.copyURLToFile(url, tempFile)
    JarsHelper.addToClasspath(tempFile)
  }
}
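In the HTTP branch, Calendar.getInstance().getTimeInMillis only serves to make the temp-file prefix unique per download before the jar is copied with commons-io. A minimal sketch of that step on its own (the object name and URL are illustrative):

// Sketch: download a jar into a temp file whose name carries the current epoch millis.
import java.io.File
import java.net.URL
import java.util.Calendar

import org.apache.commons.io.FileUtils

object DownloadSketch {
  def download(uri: String): File = {
    val tempFile = File.createTempFile(s"plugin-${Calendar.getInstance().getTimeInMillis}", ".jar")
    FileUtils.copyURLToFile(new URL(uri), tempFile) // copies the remote content to the temp file
    tempFile
  }

  def main(args: Array[String]): Unit =
    println(download("https://example.org/some-plugin.jar").getAbsolutePath)
}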