org.apache.spark.util.SystemClock Scala Examples
The following examples show how to use org.apache.spark.util.SystemClock.
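SystemClock is the production implementation of Spark's internal org.apache.spark.util.Clock trait: getTimeMillis() returns the current wall-clock time in milliseconds, and waitTillTime(targetTime) blocks until the wall clock reaches targetTime. Its counterpart ManualClock only moves when a test advances it, which is why the code below injects a Clock rather than calling System.currentTimeMillis() directly. A minimal sketch of using SystemClock on its own (the demo object and its package are hypothetical, and since Clock and SystemClock are private[spark], such code must live under an org.apache.spark package):

package org.apache.spark.examples

import org.apache.spark.util.{Clock, SystemClock}

// Hypothetical demo object; not part of Spark itself.
object SystemClockDemo {
  def main(args: Array[String]): Unit = {
    val clock: Clock = new SystemClock()
    val start = clock.getTimeMillis()  // current wall-clock time in ms
    clock.waitTillTime(start + 100)    // block until ~100 ms have passed
    println("elapsed ms: " + (clock.getTimeMillis() - start))
  }
}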
Example 1
Source File: ProcessingTimeExecutorSuite.scala from drizzle-spark (Apache License 2.0). The same suite appears verbatim in the sparkoscope and multi-tenancy-spark projects.
package org.apache.spark.sql.execution.streaming

import java.util.concurrent.{CountDownLatch, TimeUnit}

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.streaming.ProcessingTime
import org.apache.spark.util.{Clock, ManualClock, SystemClock}

class ProcessingTimeExecutorSuite extends SparkFunSuite {

  test("nextBatchTime") {
    val processingTimeExecutor = ProcessingTimeExecutor(ProcessingTime(100))
    assert(processingTimeExecutor.nextBatchTime(0) === 100)
    assert(processingTimeExecutor.nextBatchTime(1) === 100)
    assert(processingTimeExecutor.nextBatchTime(99) === 100)
    assert(processingTimeExecutor.nextBatchTime(100) === 200)
    assert(processingTimeExecutor.nextBatchTime(101) === 200)
    assert(processingTimeExecutor.nextBatchTime(150) === 200)
  }

  test("calling nextBatchTime with the result of a previous call should return the next interval") {
    val intervalMS = 100
    val processingTimeExecutor = ProcessingTimeExecutor(ProcessingTime(intervalMS))

    val ITERATION = 10
    var nextBatchTime: Long = 0
    for (it <- 1 to ITERATION) {
      nextBatchTime = processingTimeExecutor.nextBatchTime(nextBatchTime)
    }

    // nextBatchTime should be 1000
    assert(nextBatchTime === intervalMS * ITERATION)
  }

  private def testBatchTermination(intervalMs: Long): Unit = {
    var batchCounts = 0
    val processingTimeExecutor = ProcessingTimeExecutor(ProcessingTime(intervalMs))
    processingTimeExecutor.execute(() => {
      batchCounts += 1
      // If the batch termination works well, batchCounts should be 3 after `execute`
      batchCounts < 3
    })
    assert(batchCounts === 3)
  }

  test("batch termination") {
    testBatchTermination(0)
    testBatchTermination(10)
  }

  test("notifyBatchFallingBehind") {
    val clock = new ManualClock()
    @volatile var batchFallingBehindCalled = false
    val latch = new CountDownLatch(1)
    val t = new Thread() {
      override def run(): Unit = {
        val processingTimeExecutor = new ProcessingTimeExecutor(ProcessingTime(100), clock) {
          override def notifyBatchFallingBehind(realElapsedTimeMs: Long): Unit = {
            batchFallingBehindCalled = true
          }
        }
        processingTimeExecutor.execute(() => {
          latch.countDown()
          clock.waitTillTime(200)
          false
        })
      }
    }
    t.start()
    // Wait until the batch is running so that we don't call `advance` too early
    assert(latch.await(10, TimeUnit.SECONDS), "the batch has not yet started in 10 seconds")
    clock.advance(200)
    t.join()
    assert(batchFallingBehindCalled === true)
  }
}
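Note how the suite gets determinism: ProcessingTimeExecutor accepts a Clock, so production code runs against a SystemClock while the notifyBatchFallingBehind test swaps in a ManualClock and advances it explicitly. A minimal sketch of that injection pattern, with PeriodicRunner as a hypothetical stand-in for such a component (again placed under an org.apache.spark package because Clock is private[spark]):

package org.apache.spark.examples

import org.apache.spark.util.{Clock, SystemClock}

// Hypothetical component: it depends on the Clock trait, never on
// wall-clock time directly.
class PeriodicRunner(intervalMs: Long, clock: Clock = new SystemClock()) {
  // Invoke `body` once per interval until it returns false.
  def execute(body: () => Boolean): Unit = {
    var nextTime = clock.getTimeMillis() + intervalMs
    while (body()) {
      clock.waitTillTime(nextTime)
      nextTime += intervalMs
    }
  }
}

A test would construct PeriodicRunner with a ManualClock and drive it from another thread via clock.advance(...), exactly as the notifyBatchFallingBehind test above does, so no real time ever elapses.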
Example 2
Source File: RecurringTimer.scala from drizzle-spark (Apache License 2.0). The same file appears, with at most trivial import differences, in the sparkoscope, multi-tenancy-spark, spark1.52, Spark-2.3.1, and BigDatalog projects.
package org.apache.spark.streaming.util

import org.apache.spark.internal.Logging
import org.apache.spark.util.{Clock, SystemClock}

private[streaming]
class RecurringTimer(clock: Clock, period: Long, callback: (Long) => Unit, name: String)
  extends Logging {

  private val thread = new Thread("RecurringTimer - " + name) {
    setDaemon(true)
    override def run() { loop }
  }

  @volatile private var prevTime = -1L
  @volatile private var nextTime = -1L
  @volatile private var stopped = false

  // Time at which the timer will fire if started now: the next multiple
  // of the period strictly after the current clock time.
  def getStartTime(): Long = {
    (math.floor(clock.getTimeMillis().toDouble / period) + 1).toLong * period
  }

  // Start the timer, invoking the callback at multiples of the period
  // from startTime onwards.
  def start(startTime: Long): Long = synchronized {
    nextTime = startTime
    thread.start()
    logInfo("Started timer for " + name + " at time " + nextTime)
    nextTime
  }

  def start(): Long = {
    start(getStartTime())
  }

  // Stop the timer; returns the last time the callback was invoked.
  def stop(interruptTimer: Boolean): Long = synchronized {
    if (!stopped) {
      stopped = true
      if (interruptTimer) {
        thread.interrupt()
      }
      thread.join()
      logInfo("Stopped timer for " + name + " after time " + prevTime)
    }
    prevTime
  }

  private def triggerActionForNextInterval(): Unit = {
    clock.waitTillTime(nextTime)
    callback(nextTime)
    prevTime = nextTime
    nextTime += period
    logDebug("Callback for " + name + " called at time " + prevTime)
  }

  private def loop() {
    try {
      while (!stopped) {
        triggerActionForNextInterval()
      }
      // One final callback so that a graceful stop still handles the
      // interval that was in flight.
      triggerActionForNextInterval()
    } catch {
      case e: InterruptedException =>
    }
  }
}

private[streaming]
object RecurringTimer extends Logging {

  def main(args: Array[String]) {
    var lastRecurTime = 0L
    val period = 1000

    def onRecur(time: Long) {
      val currentTime = System.currentTimeMillis()
      logInfo("" + currentTime + ": " + (currentTime - lastRecurTime))
      lastRecurTime = currentTime
    }

    val timer = new RecurringTimer(new SystemClock(), period, onRecur, "Test")
    timer.start()
    Thread.sleep(30 * 1000)
    timer.stop(true)
  }
}
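A worked example of the alignment arithmetic in getStartTime(): with period = 1000 and clock.getTimeMillis() returning 1234, (floor(1234 / 1000) + 1) * 1000 = 2000, so the first callback fires at time 2000 and subsequent ones at 3000, 4000, and so on. The demo main() therefore logs roughly one line per second for thirty seconds before stop(true) interrupts the timer thread.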
Example 3
Source File: RecurringTimer.scala from iolap (Apache License 2.0). This older, Spark 1.x-lineage variant imports org.apache.spark.Logging rather than org.apache.spark.internal.Logging, inlines the per-interval work in loop(), and prints to stdout from its demo main().
package org.apache.spark.streaming.util

import org.apache.spark.Logging
import org.apache.spark.util.{Clock, SystemClock}

private[streaming]
class RecurringTimer(clock: Clock, period: Long, callback: (Long) => Unit, name: String)
  extends Logging {

  private val thread = new Thread("RecurringTimer - " + name) {
    setDaemon(true)
    override def run() { loop }
  }

  @volatile private var prevTime = -1L
  @volatile private var nextTime = -1L
  @volatile private var stopped = false

  // The next multiple of the period strictly after the current clock time.
  def getStartTime(): Long = {
    (math.floor(clock.getTimeMillis().toDouble / period) + 1).toLong * period
  }

  def start(startTime: Long): Long = synchronized {
    nextTime = startTime
    thread.start()
    logInfo("Started timer for " + name + " at time " + nextTime)
    nextTime
  }

  def start(): Long = {
    start(getStartTime())
  }

  def stop(interruptTimer: Boolean): Long = synchronized {
    if (!stopped) {
      stopped = true
      if (interruptTimer) {
        thread.interrupt()
      }
      thread.join()
      logInfo("Stopped timer for " + name + " after time " + prevTime)
    }
    prevTime
  }

  // Wait for each interval on the injected clock, then invoke the callback.
  private def loop() {
    try {
      while (!stopped) {
        clock.waitTillTime(nextTime)
        callback(nextTime)
        prevTime = nextTime
        nextTime += period
        logDebug("Callback for " + name + " called at time " + prevTime)
      }
    } catch {
      case e: InterruptedException =>
    }
  }
}

private[streaming]
object RecurringTimer {

  def main(args: Array[String]) {
    var lastRecurTime = 0L
    val period = 1000

    def onRecur(time: Long) {
      val currentTime = System.currentTimeMillis()
      println("" + currentTime + ": " + (currentTime - lastRecurTime))
      lastRecurTime = currentTime
    }

    val timer = new RecurringTimer(new SystemClock(), period, onRecur, "Test")
    timer.start()
    Thread.sleep(30 * 1000)
    timer.stop(true)
  }
}
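One behavioral difference from the drizzle-spark version in Example 2: here loop() exits as soon as stopped is set, while the newer version triggers one final callback after the loop, so a graceful stop(interruptTimer = false) still processes the interval that was in flight.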