org.apache.spark.sql.execution.ui.SparkListenerSQLExecutionEnd Scala Examples
The following examples show how to use org.apache.spark.sql.execution.ui.SparkListenerSQLExecutionEnd.
Each example is drawn from an open-source project; the source file, project, and license are noted above the code.
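Both the start and end events carry an executionId and a millisecond timestamp, so a listener can pair them to measure how long a query ran. The following is a minimal illustrative sketch, not taken from any of the projects below; the class and variable names are invented, and only the event fields actually used in the examples (executionId, time) are assumed:

import scala.collection.concurrent.TrieMap

import org.apache.spark.scheduler.{SparkListener, SparkListenerEvent}
import org.apache.spark.sql.execution.ui.{SparkListenerSQLExecutionEnd, SparkListenerSQLExecutionStart}

// Pairs start/end events on their shared executionId and reports duration.
class SQLDurationListener extends SparkListener {
  private val startTimes = TrieMap.empty[Long, Long]

  override def onOtherEvent(event: SparkListenerEvent): Unit = event match {
    case start: SparkListenerSQLExecutionStart =>
      startTimes.put(start.executionId, start.time)
    case end: SparkListenerSQLExecutionEnd =>
      startTimes.remove(end.executionId).foreach { startedAt =>
        println(s"SQL execution ${end.executionId} finished in ${end.time - startedAt} ms")
      }
    case _ => // other listener events are ignored
  }
}

// Register on an existing session:
// spark.sparkContext.addSparkListener(new SQLDurationListener)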
Example 1
Source File: ProfilerListener.scala, from carbondata (Apache License 2.0)
package org.apache.spark.sql.profiler

import org.apache.spark.scheduler.{SparkListener, SparkListenerEvent, SparkListenerTaskEnd, SparkListenerTaskGettingResult, SparkListenerTaskStart}
import org.apache.spark.sql.execution.ui.{SparkListenerSQLExecutionEnd, SparkListenerSQLExecutionStart}

private[profiler] class ProfilerListener extends SparkListener {
  override def onOtherEvent(event: SparkListenerEvent): Unit = {
    Profiler.invokeIfEnable {
      event match {
        case executionStart: SparkListenerSQLExecutionStart =>
          Profiler.addExecutionMessage(
            executionStart.executionId,
            ExecutionStart(
              executionStart.executionId,
              executionStart.time,
              executionStart.physicalPlanDescription
            ))
        case executionEnd: SparkListenerSQLExecutionEnd =>
          Profiler.send(
            ExecutionEnd(
              executionEnd.executionId,
              executionEnd.time
            )
          )
        case _ =>
      }
    }
  }
}
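A listener like this only receives events once it is registered with the SparkContext. Because ProfilerListener is private[profiler], carbondata must perform that registration from code inside the same package; the sketch below shows the general pattern, with the session variable name spark as an assumption:

// Programmatic registration (must compile inside org.apache.spark.sql.profiler,
// since ProfilerListener is private to that package):
spark.sparkContext.addSparkListener(new ProfilerListener)

// Any public SparkListener with a zero-argument constructor can instead be
// attached at startup:
//   --conf spark.extraListeners=com.example.MyListener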
Example 2
Source File: SQLExecution.scala, from drizzle-spark (Apache License 2.0)
package org.apache.spark.sql.execution

import java.util.concurrent.atomic.AtomicLong

import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.execution.ui.{SparkListenerSQLExecutionEnd, SparkListenerSQLExecutionStart}

// Note: several imports above are used only by members elided from this excerpt.
object SQLExecution {

  val EXECUTION_ID_KEY = "spark.sql.execution.id"

  private val _nextExecutionId = new AtomicLong(0)

  private def nextExecutionId: Long = _nextExecutionId.getAndIncrement

  def withExecutionId[T](sc: SparkContext, executionId: String)(body: => T): T = {
    val oldExecutionId = sc.getLocalProperty(SQLExecution.EXECUTION_ID_KEY)
    try {
      sc.setLocalProperty(SQLExecution.EXECUTION_ID_KEY, executionId)
      body
    } finally {
      sc.setLocalProperty(SQLExecution.EXECUTION_ID_KEY, oldExecutionId)
    }
  }
}
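In this version, withExecutionId temporarily overrides the spark.sql.execution.id local property so that jobs launched inside body are attributed to an already-running SQL execution, then restores the previous value. A usage sketch, with the caveat that this is an internal Spark API and the id value and session variable are illustrative:

// Jobs started inside the block are tagged with execution id "42" in the
// SQL listener events and the SQL tab of the UI.
val total = SQLExecution.withExecutionId(spark.sparkContext, "42") {
  spark.sparkContext.parallelize(1 to 100).sum()
}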
Example 3
Source File: SQLExecution.scala, from XSQL (Apache License 2.0)
package org.apache.spark.sql.execution

import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.AtomicLong

import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.execution.ui.{SparkListenerSQLExecutionEnd, SparkListenerSQLExecutionStart}

// Note: some imports above are used only by members elided from this excerpt.
object SQLExecution {

  val EXECUTION_ID_KEY = "spark.sql.execution.id"

  private val _nextExecutionId = new AtomicLong(0)

  private def nextExecutionId: Long = _nextExecutionId.getAndIncrement

  private val executionIdToQueryExecution = new ConcurrentHashMap[Long, QueryExecution]()

  def getQueryExecution(executionId: Long): QueryExecution = {
    executionIdToQueryExecution.get(executionId)
  }

  private val testing = sys.props.contains("spark.testing")

  private[sql] def checkSQLExecutionId(sparkSession: SparkSession): Unit = {
    val sc = sparkSession.sparkContext
    // Only throw an exception during tests; a missing execution ID should not fail a job.
    if (testing && sc.getLocalProperty(EXECUTION_ID_KEY) == null) {
      // Attention testers: when a test fails with this exception, it means that the action that
      // started execution of a query didn't call withNewExecutionId. The execution ID should be
      // set by calling withNewExecutionId in the action that begins execution, like
      // Dataset.collect or DataFrameWriter.insertInto.
      throw new IllegalStateException("Execution ID should be set")
    }
  }

  def withSQLConfPropagated[T](sparkSession: SparkSession)(body: => T): T = {
    val sc = sparkSession.sparkContext
    // Set all the specified SQL configs to local properties, so that they can be available at
    // the executor side.
    val allConfigs = sparkSession.sessionState.conf.getAllConfs
    val originalLocalProps = allConfigs.collect {
      case (key, value) if key.startsWith("spark") =>
        val originalValue = sc.getLocalProperty(key)
        sc.setLocalProperty(key, value)
        (key, originalValue)
    }

    try {
      body
    } finally {
      for ((key, value) <- originalLocalProps) {
        sc.setLocalProperty(key, value)
      }
    }
  }
}
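withSQLConfPropagated copies every session conf whose key starts with "spark" into the SparkContext's local properties for the duration of body, which makes them visible to running tasks. A sketch of reading one back on the executor side; the conf key and session variable are illustrative, while TaskContext.getLocalProperty is a public Spark API:

import org.apache.spark.TaskContext

val seen = SQLExecution.withSQLConfPropagated(spark) {
  spark.sparkContext.parallelize(1 to 4, 2).mapPartitions { iter =>
    // Propagated SQL confs arrive as task-local properties.
    Iterator(TaskContext.get().getLocalProperty("spark.sql.shuffle.partitions"))
  }.collect()
}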
Example 4
Source File: SQLExecution.scala, from sparkoscope (Apache License 2.0)
The code in this fork's SQLExecution.scala is identical, line for line, to Example 2 above.
Example 5
Source File: SQLExecution.scala, from multi-tenancy-spark (Apache License 2.0)
This copy of SQLExecution.scala is likewise identical to Example 2 above.
Example 6
Source File: SQLExecution.scala, from Spark-2.3.1 (Apache License 2.0)
package org.apache.spark.sql.execution

import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.AtomicLong

import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.execution.ui.{SparkListenerSQLExecutionEnd, SparkListenerSQLExecutionStart}

// Note: some imports above are used only by members elided from this excerpt.
object SQLExecution {

  val EXECUTION_ID_KEY = "spark.sql.execution.id"

  private val _nextExecutionId = new AtomicLong(0)

  private def nextExecutionId: Long = _nextExecutionId.getAndIncrement

  private val executionIdToQueryExecution = new ConcurrentHashMap[Long, QueryExecution]()

  def getQueryExecution(executionId: Long): QueryExecution = {
    executionIdToQueryExecution.get(executionId)
  }

  private val testing = sys.props.contains("spark.testing")

  private[sql] def checkSQLExecutionId(sparkSession: SparkSession): Unit = {
    val sc = sparkSession.sparkContext
    // Only throw an exception during tests; a missing execution ID should not fail a job.
    if (testing && sc.getLocalProperty(EXECUTION_ID_KEY) == null) {
      // Attention testers: when a test fails with this exception, it means that the action that
      // started execution of a query didn't call withNewExecutionId. The execution ID should be
      // set by calling withNewExecutionId in the action that begins execution, like
      // Dataset.collect or DataFrameWriter.insertInto.
      throw new IllegalStateException("Execution ID should be set")
    }
  }

  def withExecutionId[T](sc: SparkContext, executionId: String)(body: => T): T = {
    val oldExecutionId = sc.getLocalProperty(SQLExecution.EXECUTION_ID_KEY)
    try {
      sc.setLocalProperty(SQLExecution.EXECUTION_ID_KEY, executionId)
      body
    } finally {
      sc.setLocalProperty(SQLExecution.EXECUTION_ID_KEY, oldExecutionId)
    }
  }
}
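checkSQLExecutionId only throws under spark.testing; it guards against actions that launch jobs without first establishing an execution ID. In the full Spark source that ID is set by SQLExecution.withNewExecutionId, which is elided from this excerpt. A hedged sketch of the expected call pattern, assuming the Spark 2.3 signature withNewExecutionId(sparkSession, queryExecution)(body):

import org.apache.spark.sql.{DataFrame, Row}

// Hypothetical action that runs its jobs under a fresh execution ID, so
// SparkListenerSQLExecutionStart/End events are emitted and
// checkSQLExecutionId passes. (withNewExecutionId is elided above.)
def trackedCollect(df: DataFrame): Array[Row] =
  SQLExecution.withNewExecutionId(df.sparkSession, df.queryExecution) {
    df.queryExecution.executedPlan.executeCollectPublic()
  }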