org.apache.spark.TaskContextImpl Scala Examples

The following examples show how to use org.apache.spark.TaskContextImpl. Each example is extracted from an open source project; the source file, project, and license are noted above the code.
Example 1
Source File: MemoryTestingUtils.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.memory

import java.util.Properties

import org.apache.spark.{SparkEnv, TaskContext, TaskContextImpl}


object MemoryTestingUtils {
  def fakeTaskContext(env: SparkEnv): TaskContext = {
    val taskMemoryManager = new TaskMemoryManager(env.memoryManager, 0)
    new TaskContextImpl(
      stageId = 0,
      partitionId = 0,
      taskAttemptId = 0,
      attemptNumber = 0,
      _taskMemoryManager = taskMemoryManager,
      localProperties = new Properties,
      metricsSystem = env.metricsSystem)
  }
} 
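In tests, a context built this way is typically installed on the current thread so that code calling TaskContext.get() sees it. A minimal sketch, reusing the imports above; note that setTaskContext and unset are protected[spark], so the caller must live under the org.apache.spark package:

val context = MemoryTestingUtils.fakeTaskContext(SparkEnv.get)
TaskContext.setTaskContext(context)
try {
  // Code that calls TaskContext.get() internally now sees the fake context.
} finally {
  TaskContext.unset()
}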
Example 2
Source File: TaskContextImplAdapter.scala    From OAP   with Apache License 2.0
package org.apache.spark.sql.oap.adapter

import java.util.Properties

import org.apache.spark.{TaskContext, TaskContextImpl}
import org.apache.spark.memory.TaskMemoryManager
import org.apache.spark.metrics.MetricsSystem


object TaskContextImplAdapter {

  
  /**
   * Builds a TaskContextImpl through the Spark 2.3+ constructor, which added a
   * stageAttemptNumber parameter; the adapter pins it to 0 so that callers
   * written against the older signature keep compiling.
   */
  def createTaskContextImpl(
      stageId: Int,
      partitionId: Int,
      taskAttemptId: Long,
      attemptNumber: Int,
      taskMemoryManager: TaskMemoryManager,
      localProperties: Properties,
      metricsSystem: MetricsSystem): TaskContext = {
    new TaskContextImpl(
      stageId,
      stageAttemptNumber = 0,
      partitionId,
      taskAttemptId,
      attemptNumber,
      taskMemoryManager,
      localProperties,
      metricsSystem)
  }
} 
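A minimal usage sketch for the adapter; the argument values are illustrative and SparkEnv.get assumes a running Spark application:

import java.util.Properties

import org.apache.spark.SparkEnv
import org.apache.spark.memory.TaskMemoryManager
import org.apache.spark.sql.oap.adapter.TaskContextImplAdapter

val env = SparkEnv.get
val taskContext = TaskContextImplAdapter.createTaskContextImpl(
  stageId = 0,
  partitionId = 0,
  taskAttemptId = 0L,
  attemptNumber = 0,
  taskMemoryManager = new TaskMemoryManager(env.memoryManager, 0),
  localProperties = new Properties,
  metricsSystem = env.metricsSystem)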
Example 3
Source File: MemoryTestingUtils.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.memory

import java.util.Properties

import org.apache.spark.{SparkEnv, TaskContext, TaskContextImpl}


object MemoryTestingUtils {
  def fakeTaskContext(env: SparkEnv): TaskContext = {
    val taskMemoryManager = new TaskMemoryManager(env.memoryManager, 0)
    new TaskContextImpl(
      stageId = 0,
      partitionId = 0,
      taskAttemptId = 0,
      attemptNumber = 0,
      taskMemoryManager = taskMemoryManager,
      localProperties = new Properties,
      metricsSystem = env.metricsSystem)
  }
} 
Example 4
Source File: SparkUtil4Test.scala    From carbondata   with Apache License 2.0
package org.apache.spark.util

import mockit.{Invocation, Mock, MockUp}
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, TaskContextImpl}


object SparkUtil4Test {

  // Guards against installing the JMockit mock-ups more than once per JVM.
  private var initializedMock = false

  def createTaskMockUp(sqlContext: SQLContext): Unit = {
    if (!initializedMock) {
      if (sqlContext.sparkContext.version.startsWith("2.1")) {
        createTaskMockUp2_1()
      } else if (sqlContext.sparkContext.version.startsWith("2.2")) {
        createTaskMockUp2_2()
      }
      initializedMock = true
    }
  }

  private def createTaskMockUp2_1(): Unit = {
    new MockUp[TaskContextImpl] {
      @Mock private[spark] def markTaskCompleted(invocation: Invocation): Unit = {
        try {
          invocation.proceed()
        } catch {
          case _: Exception => // ignore
        }
      }

      @Mock def addTaskCompletionListener(
          invocation: Invocation,
          listener: TaskCompletionListener): TaskContextImpl = {
        try {
          invocation.proceed(listener)
        } catch {
          case _: Exception =>
            // Ignore the failure and return the mocked instance unchanged.
            invocation.getInvokedInstance[TaskContextImpl]
        }
      }
    }
  }

  private def createTaskMockUp2_2(): Unit = {
    new MockUp[TaskContextImpl] {
      @Mock private[spark] def markTaskCompleted(invocation: Invocation, error: Option[Throwable]): Unit = {
        try {
          invocation.proceed(error)
        } catch {
          case _: Exception => // ignore
        }
      }
    }
  }
} 
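The mock-ups need to be installed only once per JVM, before the first query runs. A minimal sketch, assuming a local Spark 2.1 or 2.2 application (the master, app name, and job below are illustrative):

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import org.apache.spark.util.SparkUtil4Test

val conf = new SparkConf().setMaster("local[2]").setAppName("mockup-demo")
val sc = new SparkContext(conf)
val sqlContext = new SQLContext(sc)

// Installs the version-appropriate mock-ups; subsequent calls are no-ops.
SparkUtil4Test.createTaskMockUp(sqlContext)
sqlContext.sparkContext.parallelize(1 to 10).count()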
Example 5
Source File: MemoryTestingUtils.scala    From multi-tenancy-spark   with Apache License 2.0
package org.apache.spark.memory

import java.util.Properties

import org.apache.spark.{SparkEnv, TaskContext, TaskContextImpl}


object MemoryTestingUtils {
  def fakeTaskContext(env: SparkEnv): TaskContext = {
    val taskMemoryManager = new TaskMemoryManager(env.memoryManager, 0)
    new TaskContextImpl(
      stageId = 0,
      partitionId = 0,
      taskAttemptId = 0,
      attemptNumber = 0,
      taskMemoryManager = taskMemoryManager,
      localProperties = new Properties,
      metricsSystem = env.metricsSystem)
  }
} 
Example 6
Source File: MemoryTestingUtils.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.memory

import java.util.Properties

import org.apache.spark.{SparkEnv, TaskContext, TaskContextImpl}


object MemoryTestingUtils {
  def fakeTaskContext(env: SparkEnv): TaskContext = {
    val taskMemoryManager = new TaskMemoryManager(env.memoryManager, 0)
    new TaskContextImpl(
      stageId = 0,
      stageAttemptNumber = 0,
      partitionId = 0,
      taskAttemptId = 0,
      attemptNumber = 0,
      taskMemoryManager = taskMemoryManager,
      localProperties = new Properties,
      metricsSystem = env.metricsSystem)
  }
} 
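This variant targets Spark 2.3, where TaskContextImpl's constructor gained the stageAttemptNumber parameter seen above. The resulting context reports the stubbed values through the public TaskContext accessors; a quick sketch, assuming a running SparkEnv:

val ctx = MemoryTestingUtils.fakeTaskContext(SparkEnv.get)
assert(ctx.stageId == 0)
assert(ctx.stageAttemptNumber == 0)
assert(ctx.attemptNumber == 0)
assert(ctx.partitionId == 0)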
Example 7
Source File: MemoryTestingUtils.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.memory

import org.apache.spark.{SparkEnv, TaskContext, TaskContextImpl}


object MemoryTestingUtils {
  def fakeTaskContext(env: SparkEnv): TaskContext = {
    val taskMemoryManager = new TaskMemoryManager(env.memoryManager, 0)
    new TaskContextImpl(
      stageId = 0,
      partitionId = 0,
      taskAttemptId = 0,
      attemptNumber = 0,
      taskMemoryManager = taskMemoryManager,
      metricsSystem = env.metricsSystem,
      internalAccumulators = Seq.empty)
  }
}