javax.servlet.http.HttpServletRequest Scala Examples
The following examples show how to use javax.servlet.http.HttpServletRequest in Scala. They are collected from a variety of open-source projects; each example notes its source file, originating project, and license.
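Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) showing the two most common operations on an HttpServletRequest: reading a query parameter and reading a header. The class and parameter names are invented for illustration.

import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}

class EchoServlet extends HttpServlet {
  override def doGet(req: HttpServletRequest, resp: HttpServletResponse): Unit = {
    // getParameter and getHeader return null when absent, hence the Option wrapping
    val name = Option(req.getParameter("name")).getOrElse("world")
    val agent = Option(req.getHeader("User-Agent")).getOrElse("unknown")
    resp.setContentType("text/plain")
    resp.getWriter.write(s"Hello, $name (User-Agent: $agent)")
  }
}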
Example 1
Source File: MetricsServlet.scala From drizzle-spark with Apache License 2.0
package org.apache.spark.metrics.sink

import java.util.Properties
import java.util.concurrent.TimeUnit
import javax.servlet.http.HttpServletRequest

import com.codahale.metrics.MetricRegistry
import com.codahale.metrics.json.MetricsModule
import com.fasterxml.jackson.databind.ObjectMapper
import org.eclipse.jetty.servlet.ServletContextHandler

import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.ui.JettyUtils._

private[spark] class MetricsServlet(
    val property: Properties,
    val registry: MetricRegistry,
    securityMgr: SecurityManager)
  extends Sink {

  val SERVLET_KEY_PATH = "path"
  val SERVLET_KEY_SAMPLE = "sample"

  val SERVLET_DEFAULT_SAMPLE = false

  val servletPath = property.getProperty(SERVLET_KEY_PATH)

  val servletShowSample = Option(property.getProperty(SERVLET_KEY_SAMPLE)).map(_.toBoolean)
    .getOrElse(SERVLET_DEFAULT_SAMPLE)

  val mapper = new ObjectMapper().registerModule(
    new MetricsModule(TimeUnit.SECONDS, TimeUnit.MILLISECONDS, servletShowSample))

  def getHandlers(conf: SparkConf): Array[ServletContextHandler] = {
    Array[ServletContextHandler](
      createServletHandler(servletPath,
        new ServletParams(request => getMetricsSnapshot(request), "text/json"), securityMgr, conf)
    )
  }

  def getMetricsSnapshot(request: HttpServletRequest): String = {
    mapper.writeValueAsString(registry)
  }

  override def start() { }

  override def stop() { }

  override def report() { }
}
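The sink reads its servlet path and sampling flag from the Properties it is constructed with, using the SERVLET_KEY_PATH and SERVLET_KEY_SAMPLE keys shown above. A rough construction sketch, assuming a MetricRegistry and SecurityManager are already in scope:

import java.util.Properties

// Keys mirror SERVLET_KEY_PATH / SERVLET_KEY_SAMPLE in the class above.
val props = new Properties()
props.setProperty("path", "/metrics/json")
props.setProperty("sample", "false")
// new MetricsServlet(props, registry, securityMgr).getHandlers(conf) would then yield
// a Jetty handler serving the JSON metrics snapshot at /metrics/json.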
Example 2
Source File: WorkerWebUI.scala From drizzle-spark with Apache License 2.0
package org.apache.spark.deploy.worker.ui

import java.io.File
import javax.servlet.http.HttpServletRequest

import org.apache.spark.deploy.worker.Worker
import org.apache.spark.internal.Logging
import org.apache.spark.ui.{SparkUI, WebUI}
import org.apache.spark.ui.JettyUtils._
import org.apache.spark.util.RpcUtils

// NOTE: the enclosing WorkerWebUI class declaration was elided in the original listing;
// the method below belongs to it.
  def initialize() {
    val logPage = new LogPage(this)
    attachPage(logPage)
    attachPage(new WorkerPage(this))
    attachHandler(createStaticHandler(WorkerWebUI.STATIC_RESOURCE_BASE, "/static"))
    attachHandler(createServletHandler("/log",
      (request: HttpServletRequest) => logPage.renderLog(request),
      worker.securityMgr,
      worker.conf))
  }
}

private[worker] object WorkerWebUI {
  val STATIC_RESOURCE_BASE = SparkUI.STATIC_RESOURCE_DIR
  val DEFAULT_RETAINED_DRIVERS = 1000
  val DEFAULT_RETAINED_EXECUTORS = 1000
}
Example 3
Source File: HistoryPage.scala From drizzle-spark with Apache License 2.0
package org.apache.spark.deploy.history

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[history] class HistoryPage(parent: HistoryServer) extends WebUIPage("") {

  def render(request: HttpServletRequest): Seq[Node] = {
    val requestedIncomplete =
      Option(request.getParameter("showIncomplete")).getOrElse("false").toBoolean

    val allAppsSize = parent.getApplicationList().count(_.completed != requestedIncomplete)
    val providerConfig = parent.getProviderConfig()
    val content =
      <div>
        <div class="span12">
          <ul class="unstyled">
            {providerConfig.map { case (k, v) => <li><strong>{k}:</strong> {v}</li> }}
          </ul>
          {
            if (allAppsSize > 0) {
              <script src={UIUtils.prependBaseUri("/static/dataTables.rowsGroup.js")}></script> ++
                <div id="history-summary" class="span12 pagination"></div> ++
                <script src={UIUtils.prependBaseUri("/static/utils.js")}></script> ++
                <script src={UIUtils.prependBaseUri("/static/historypage.js")}></script> ++
                <script>setAppLimit({parent.maxApplications})</script>
            } else if (requestedIncomplete) {
              <h4>No incomplete applications found!</h4>
            } else {
              <h4>No completed applications found!</h4> ++ parent.emptyListingHtml
            }
          }
          <a href={makePageLink(!requestedIncomplete)}>
            {
              if (requestedIncomplete) {
                "Back to completed applications"
              } else {
                "Show incomplete applications"
              }
            }
          </a>
        </div>
      </div>
    UIUtils.basicSparkPage(content, "History Server", true)
  }

  private def makePageLink(showIncomplete: Boolean): String = {
    UIUtils.prependBaseUri("/?" + "showIncomplete=" + showIncomplete)
  }
}
Example 4
Source File: ExecutorsPage.scala From drizzle-spark with Apache License 2.0
package org.apache.spark.ui.exec

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.status.api.v1.ExecutorSummary
import org.apache.spark.ui.{UIUtils, WebUIPage}

// This isn't even used anymore -- but we need to keep it b/c of a MiMa false positive
private[ui] case class ExecutorSummaryInfo(
    id: String,
    hostPort: String,
    rddBlocks: Int,
    memoryUsed: Long,
    diskUsed: Long,
    activeTasks: Int,
    failedTasks: Int,
    completedTasks: Int,
    totalTasks: Int,
    totalDuration: Long,
    totalInputBytes: Long,
    totalShuffleRead: Long,
    totalShuffleWrite: Long,
    maxMemory: Long,
    executorLogs: Map[String, String])

private[ui] class ExecutorsPage(
    parent: ExecutorsTab,
    threadDumpEnabled: Boolean)
  extends WebUIPage("") {
  private val listener = parent.listener

  def render(request: HttpServletRequest): Seq[Node] = {
    val content =
      <div>
        {
          <div id="active-executors"></div> ++
          <script src={UIUtils.prependBaseUri("/static/utils.js")}></script> ++
          <script src={UIUtils.prependBaseUri("/static/executorspage.js")}></script> ++
          <script>setThreadDumpEnabled({threadDumpEnabled})</script>
        }
      </div>;

    UIUtils.headerSparkPage("Executors", content, parent, useDataTables = true)
  }
}

private[spark] object ExecutorsPage {
  def getExecInfo(
      listener: ExecutorsListener,
      statusId: Int,
      isActive: Boolean): ExecutorSummary = {
    val status = if (isActive) {
      listener.activeStorageStatusList(statusId)
    } else {
      listener.deadStorageStatusList(statusId)
    }
    val execId = status.blockManagerId.executorId
    val hostPort = status.blockManagerId.hostPort
    val rddBlocks = status.numBlocks
    val memUsed = status.memUsed
    val maxMem = status.maxMem
    val diskUsed = status.diskUsed
    val taskSummary = listener.executorToTaskSummary.getOrElse(execId, ExecutorTaskSummary(execId))

    new ExecutorSummary(
      execId,
      hostPort,
      isActive,
      rddBlocks,
      memUsed,
      diskUsed,
      taskSummary.totalCores,
      taskSummary.tasksMax,
      taskSummary.tasksActive,
      taskSummary.tasksFailed,
      taskSummary.tasksComplete,
      taskSummary.tasksActive + taskSummary.tasksFailed + taskSummary.tasksComplete,
      taskSummary.duration,
      taskSummary.jvmGCTime,
      taskSummary.inputBytes,
      taskSummary.shuffleRead,
      taskSummary.shuffleWrite,
      maxMem,
      taskSummary.executorLogs
    )
  }
}
Example 5
Source File: ExecutorThreadDumpPage.scala From drizzle-spark with Apache License 2.0
package org.apache.spark.ui.exec

import javax.servlet.http.HttpServletRequest

import scala.xml.{Node, Text}

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class ExecutorThreadDumpPage(parent: ExecutorsTab) extends WebUIPage("threadDump") {

  private val sc = parent.sc

  def render(request: HttpServletRequest): Seq[Node] = {
    val executorId = Option(request.getParameter("executorId")).map { executorId =>
      UIUtils.decodeURLParameter(executorId)
    }.getOrElse {
      throw new IllegalArgumentException(s"Missing executorId parameter")
    }
    val time = System.currentTimeMillis()
    val maybeThreadDump = sc.get.getExecutorThreadDump(executorId)

    val content = maybeThreadDump.map { threadDump =>
      val dumpRows = threadDump.sortWith {
        case (threadTrace1, threadTrace2) =>
          val v1 = if (threadTrace1.threadName.contains("Executor task launch")) 1 else 0
          val v2 = if (threadTrace2.threadName.contains("Executor task launch")) 1 else 0
          if (v1 == v2) {
            threadTrace1.threadName.toLowerCase < threadTrace2.threadName.toLowerCase
          } else {
            v1 > v2
          }
      }.map { thread =>
        val threadId = thread.threadId
        <tr id={s"thread_${threadId}_tr"} class="accordion-heading"
            onclick={s"toggleThreadStackTrace($threadId, false)"}
            onmouseover={s"onMouseOverAndOut($threadId)"}
            onmouseout={s"onMouseOverAndOut($threadId)"}>
          <td id={s"${threadId}_td_id"}>{threadId}</td>
          <td id={s"${threadId}_td_name"}>{thread.threadName}</td>
          <td id={s"${threadId}_td_state"}>{thread.threadState}</td>
          <td id={s"${threadId}_td_stacktrace"} class="hidden">{thread.stackTrace}</td>
        </tr>
      }

      <div class="row-fluid">
        <p>Updated at {UIUtils.formatDate(time)}</p>
        {
          // scalastyle:off
          <p><a class="expandbutton" onClick="expandAllThreadStackTrace(true)">
            Expand All
          </a></p>
          <p><a class="expandbutton hidden" onClick="collapseAllThreadStackTrace(true)">
            Collapse All
          </a></p>
          <div class="form-inline">
            <div class="bs-example" data-example-id="simple-form-inline">
              <div class="form-group">
                <div class="input-group">
                  Search: <input type="text" class="form-control" id="search" oninput="onSearchStringChange()"></input>
                </div>
              </div>
            </div>
          </div>
          <p></p>
          // scalastyle:on
        }
        <table class={UIUtils.TABLE_CLASS_STRIPED + " accordion-group" + " sortable"}>
          <thead>
            <th onClick="collapseAllThreadStackTrace(false)">Thread ID</th>
            <th onClick="collapseAllThreadStackTrace(false)">Thread Name</th>
            <th onClick="collapseAllThreadStackTrace(false)">Thread State</th>
          </thead>
          <tbody>{dumpRows}</tbody>
        </table>
      </div>
    }.getOrElse(Text("Error fetching thread dump"))
    UIUtils.headerSparkPage(s"Thread dump for executor $executorId", content, parent)
  }
}
Example 6
Source File: EnvironmentPage.scala From drizzle-spark with Apache License 2.0
package org.apache.spark.ui.env

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class EnvironmentPage(parent: EnvironmentTab) extends WebUIPage("") {
  private val listener = parent.listener

  private def removePass(kv: (String, String)): (String, String) = {
    if (kv._1.toLowerCase.contains("password") || kv._1.toLowerCase.contains("secret")) {
      (kv._1, "******")
    } else kv
  }

  def render(request: HttpServletRequest): Seq[Node] = {
    val runtimeInformationTable = UIUtils.listingTable(
      propertyHeader, jvmRow, listener.jvmInformation, fixedWidth = true)
    val sparkPropertiesTable = UIUtils.listingTable(propertyHeader, propertyRow,
      listener.sparkProperties.map(removePass), fixedWidth = true)
    val systemPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.systemProperties, fixedWidth = true)
    val classpathEntriesTable = UIUtils.listingTable(
      classPathHeaders, classPathRow, listener.classpathEntries, fixedWidth = true)
    val content =
      <span>
        <h4>Runtime Information</h4> {runtimeInformationTable}
        <h4>Spark Properties</h4> {sparkPropertiesTable}
        <h4>System Properties</h4> {systemPropertiesTable}
        <h4>Classpath Entries</h4> {classpathEntriesTable}
      </span>

    UIUtils.headerSparkPage("Environment", content, parent)
  }

  private def propertyHeader = Seq("Name", "Value")
  private def classPathHeaders = Seq("Resource", "Source")
  private def jvmRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr>
}
Example 7
Source File: StagesTab.scala From drizzle-spark with Apache License 2.0
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import org.apache.spark.scheduler.SchedulingMode
import org.apache.spark.ui.{SparkUI, SparkUITab}

private[ui] class StagesTab(parent: SparkUI) extends SparkUITab(parent, "stages") {
  val sc = parent.sc
  val conf = parent.conf
  val killEnabled = parent.killEnabled
  val progressListener = parent.jobProgressListener
  val operationGraphListener = parent.operationGraphListener
  val executorsListener = parent.executorsListener

  attachPage(new AllStagesPage(this))
  attachPage(new StagePage(this))
  attachPage(new PoolPage(this))

  def isFairScheduler: Boolean = progressListener.schedulingMode == Some(SchedulingMode.FAIR)

  def handleKillRequest(request: HttpServletRequest): Unit = {
    if (killEnabled && parent.securityManager.checkModifyPermissions(request.getRemoteUser)) {
      val stageId = Option(request.getParameter("id")).map(_.toInt)
      stageId.foreach { id =>
        if (progressListener.activeStages.contains(id)) {
          sc.foreach(_.cancelStage(id))
          // Do a quick pause here to give Spark time to kill the stage so it shows up as
          // killed after the refresh. Note that this will block the serving thread so the
          // time should be limited in duration.
          Thread.sleep(100)
        }
      }
    }
  }
}
Example 8
Source File: JobsTab.scala From drizzle-spark with Apache License 2.0
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import org.apache.spark.scheduler.SchedulingMode
import org.apache.spark.ui.{SparkUI, SparkUITab}

private[ui] class JobsTab(parent: SparkUI) extends SparkUITab(parent, "jobs") {
  val sc = parent.sc
  val killEnabled = parent.killEnabled
  val jobProgresslistener = parent.jobProgressListener
  val executorListener = parent.executorsListener
  val operationGraphListener = parent.operationGraphListener

  def isFairScheduler: Boolean =
    jobProgresslistener.schedulingMode == Some(SchedulingMode.FAIR)

  def getSparkUser: String = parent.getSparkUser

  attachPage(new AllJobsPage(this))
  attachPage(new JobPage(this))

  def handleKillRequest(request: HttpServletRequest): Unit = {
    if (killEnabled && parent.securityManager.checkModifyPermissions(request.getRemoteUser)) {
      val jobId = Option(request.getParameter("id")).map(_.toInt)
      jobId.foreach { id =>
        if (jobProgresslistener.activeJobs.contains(id)) {
          sc.foreach(_.cancelJob(id))
          // Do a quick pause here to give Spark time to kill the job so it shows up as
          // killed after the refresh. Note that this will block the serving thread so the
          // time should be limited in duration.
          Thread.sleep(100)
        }
      }
    }
  }
}
Example 9
Source File: PoolPage.scala From drizzle-spark with Apache License 2.0
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.scheduler.StageInfo
import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class PoolPage(parent: StagesTab) extends WebUIPage("pool") {
  private val sc = parent.sc
  private val listener = parent.progressListener

  def render(request: HttpServletRequest): Seq[Node] = {
    listener.synchronized {
      val poolName = Option(request.getParameter("poolname")).map { poolname =>
        UIUtils.decodeURLParameter(poolname)
      }.getOrElse {
        throw new IllegalArgumentException(s"Missing poolname parameter")
      }

      val poolToActiveStages = listener.poolToActiveStages
      val activeStages = poolToActiveStages.get(poolName) match {
        case Some(s) => s.values.toSeq
        case None => Seq[StageInfo]()
      }
      val shouldShowActiveStages = activeStages.nonEmpty
      val activeStagesTable =
        new StageTableBase(request, activeStages, "", "activeStage", parent.basePath, "stages/pool",
          parent.progressListener, parent.isFairScheduler, parent.killEnabled,
          isFailedStage = false)

      // For now, pool information is only accessible in live UIs
      val pools = sc.map(_.getPoolForName(poolName).getOrElse {
        throw new IllegalArgumentException(s"Unknown poolname: $poolName")
      }).toSeq
      val poolTable = new PoolTable(pools, parent)

      var content = <h4>Summary </h4> ++ poolTable.toNodeSeq
      if (shouldShowActiveStages) {
        content ++= <h4>{activeStages.size} Active Stages</h4> ++ activeStagesTable.toNodeSeq
      }

      UIUtils.headerSparkPage("Fair Scheduler Pool: " + poolName, content, parent)
    }
  }
}
Example 10
Source File: StagePageSuite.scala From drizzle-spark with Apache License 2.0
package org.apache.spark.ui

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.mockito.Mockito.{mock, when, RETURNS_SMART_NULLS}

import org.apache.spark._
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler._
import org.apache.spark.storage.StorageStatusListener
import org.apache.spark.ui.exec.ExecutorsListener
import org.apache.spark.ui.jobs.{JobProgressListener, StagePage, StagesTab}
import org.apache.spark.ui.scope.RDDOperationGraphListener

class StagePageSuite extends SparkFunSuite with LocalSparkContext {

  private val peakExecutionMemory = 10

  test("peak execution memory only displayed if unsafe is enabled") {
    val unsafeConf = "spark.sql.unsafe.enabled"
    val conf = new SparkConf(false).set(unsafeConf, "true")
    val html = renderStagePage(conf).toString().toLowerCase
    val targetString = "peak execution memory"
    assert(html.contains(targetString))
    // Disable unsafe and make sure it's not there
    val conf2 = new SparkConf(false).set(unsafeConf, "false")
    val html2 = renderStagePage(conf2).toString().toLowerCase
    assert(!html2.contains(targetString))
    // Avoid setting anything; it should be displayed by default
    val conf3 = new SparkConf(false)
    val html3 = renderStagePage(conf3).toString().toLowerCase
    assert(html3.contains(targetString))
  }

  test("SPARK-10543: peak execution memory should be per-task rather than cumulative") {
    val unsafeConf = "spark.sql.unsafe.enabled"
    val conf = new SparkConf(false).set(unsafeConf, "true")
    val html = renderStagePage(conf).toString().toLowerCase
    // verify min/25/50/75/max show task value not cumulative values
    assert(html.contains(s"<td>$peakExecutionMemory.0 b</td>" * 5))
  }

  private def renderStagePage(conf: SparkConf): Seq[Node] = {
    val jobListener = new JobProgressListener(conf)
    val graphListener = new RDDOperationGraphListener(conf)
    val executorsListener = new ExecutorsListener(new StorageStatusListener(conf), conf)
    val tab = mock(classOf[StagesTab], RETURNS_SMART_NULLS)
    val request = mock(classOf[HttpServletRequest])
    when(tab.conf).thenReturn(conf)
    when(tab.progressListener).thenReturn(jobListener)
    when(tab.operationGraphListener).thenReturn(graphListener)
    when(tab.executorsListener).thenReturn(executorsListener)
    when(tab.appName).thenReturn("testing")
    when(tab.headerTabs).thenReturn(Seq.empty)
    when(request.getParameter("id")).thenReturn("0")
    when(request.getParameter("attempt")).thenReturn("0")
    val page = new StagePage(tab)

    // Simulate a stage in job progress listener
    val stageInfo = new StageInfo(0, 0, "dummy", 1, Seq.empty, Seq.empty, "details")
    // Simulate two tasks to test PEAK_EXECUTION_MEMORY correctness
    (1 to 2).foreach { taskId =>
      val taskInfo = new TaskInfo(taskId, taskId, 0, 0, "0", "localhost", TaskLocality.ANY, false)
      jobListener.onStageSubmitted(SparkListenerStageSubmitted(stageInfo))
      jobListener.onTaskStart(SparkListenerTaskStart(0, 0, taskInfo))
      taskInfo.markFinished(TaskState.FINISHED)
      val taskMetrics = TaskMetrics.empty
      taskMetrics.incPeakExecutionMemory(peakExecutionMemory)
      jobListener.onTaskEnd(
        SparkListenerTaskEnd(0, 0, "result", Success, taskInfo, taskMetrics))
    }
    jobListener.onStageCompleted(SparkListenerStageCompleted(stageInfo))
    page.render(request)
  }
}
Example 11
Source File: Rl4jDoomWebAppStack.scala From webapp-rl4j with MIT License
package ch.epfl.doomwatcher

import org.scalatra._
import scalate.ScalateSupport
import org.fusesource.scalate.{ TemplateEngine, Binding }
import org.fusesource.scalate.layout.DefaultLayoutStrategy
import javax.servlet.http.HttpServletRequest
import collection.mutable

trait Rl4jDoomWebAppStack extends ScalatraServlet with ScalateSupport {

  notFound {
    // remove content type in case it was set through an action
    contentType = null
    // Try to render a ScalateTemplate if no route matched
    findTemplate(requestPath) map { path =>
      contentType = "text/html"
      layoutTemplate(path)
    } orElse serveStaticResource() getOrElse resourceNotFound()
  }
}
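A hypothetical servlet mixing in this stack inherits the notFound fallback above; the class name and route body below are invented for illustration:

class DoomServlet extends Rl4jDoomWebAppStack {
  get("/") {
    <p>Hello from the Doom watcher</p>
  }
}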
Example 12
Source File: ServerSocket.scala From Linkis with Apache License 2.0
package com.webank.wedatasphere.linkis.server.socket

import java.util.concurrent.TimeUnit

import com.webank.wedatasphere.linkis.common.collection.BlockingLoopArray
import com.webank.wedatasphere.linkis.common.utils.Utils
import javax.servlet.http.HttpServletRequest
import com.webank.wedatasphere.linkis.server.security.SecurityFilter
import org.eclipse.jetty.websocket.api.{Session, WebSocketAdapter}

case class ServerSocket(request: HttpServletRequest, socketListener: SocketListener,
                        protocol: String = "") extends WebSocketAdapter {
  private var session: Session = _
  private[socket] var id: Int = _
  val createTime = System.currentTimeMillis

  def user = SecurityFilter.getLoginUser(request)

  // Add a queue as a buffer; messages cannot be sent back directly, or the connection may not keep up
  // 加一个队列做缓冲,不能直接sendMessage回去,会导致连接受不住
  private val cacheMessages = new BlockingLoopArray[String](100)
  Utils.defaultScheduler.scheduleAtFixedRate(new Runnable {
    override def run(): Unit = {
      var message = cacheMessages.poll()
      while (message.isDefined) {
        message.foreach(session.getRemote.sendString)
        message = cacheMessages.poll()
      }
    }
  }, 1000, 1000, TimeUnit.MILLISECONDS)

  override def onWebSocketClose(statusCode: Int, reason: String): Unit =
    socketListener.onClose(this, statusCode, reason)

  override def onWebSocketConnect(sess: Session): Unit = {
    session = sess
    socketListener.onOpen(this)
  }

  override def onWebSocketText(message: String): Unit = socketListener.onMessage(this, message)

  def sendMessage(message: String): Unit = {
    cacheMessages.put(message)
  }

  override def toString: String = s"ServerSocket($id, $user)"
}
Example 13
Source File: package.scala From Linkis with Apache License 2.0
package com.webank.wedatasphere.linkis

import java.util

import javax.servlet.http.HttpServletRequest
import com.webank.wedatasphere.linkis.common.exception.{ErrorException, ExceptionManager, FatalException, WarnException}
import com.webank.wedatasphere.linkis.common.utils.Utils
import com.webank.wedatasphere.linkis.server.exception.{BDPServerErrorException, NonLoginException}
import com.webank.wedatasphere.linkis.server.security.SecurityFilter
import org.apache.commons.lang.StringUtils
import org.apache.commons.lang.exception.ExceptionUtils
import org.slf4j.Logger

import scala.collection.{JavaConversions, mutable}

package object server {

  val EXCEPTION_MSG = "errorMsg"
  type JMap[K, V] = java.util.HashMap[K, V]

  implicit def getUser(req: HttpServletRequest): String = SecurityFilter.getLoginUsername(req)

  def validateFailed(message: String): Message = Message(status = 2).setMessage(message)

  def validate[T](json: util.Map[String, T], keys: String*): Unit = {
    keys.foreach(k => if (!json.contains(k) || json.get(k) == null || StringUtils.isEmpty(json.get(k).toString))
      throw new BDPServerErrorException(11001, s"Verification failed, $k cannot be empty!(验证失败,$k 不能为空!)"))
  }

  def error(message: String): Message = Message.error(message)
  implicit def ok(msg: String): Message = Message.ok(msg)
  implicit def error(t: Throwable): Message = Message.error(t)
  implicit def error(e: (String, Throwable)): Message = Message.error(e)
  implicit def error(msg: String, t: Throwable): Message = Message.error(msg -> t)

//  def tryCatch[T](tryOp: => T)(catchOp: Throwable => T): T = Utils.tryCatch(tryOp)(catchOp)
//  def tryCatch(tryOp: => Message)(catchOp: Throwable => Message): Message = Utils.tryCatch(tryOp){
//    case nonLogin: NonLoginException => Message.noLogin(msg = nonLogin.getMessage)
//    case t => catchOp(t)
//  }

  def catchMsg(tryOp: => Message)(msg: String)(implicit log: Logger): Message = Utils.tryCatch(tryOp) {
    case fatal: FatalException =>
      log.error("Fatal Error, system exit...", fatal)
      System.exit(fatal.getErrCode)
      Message.error("Fatal Error, system exit...")
    case nonLogin: NonLoginException =>
      val message = Message.noLogin(nonLogin.getMessage)
      message.data(EXCEPTION_MSG, nonLogin.toMap)
      message
    case error: ErrorException =>
      val cause = error.getCause
      val errorMsg = cause match {
        case t: ErrorException => s"error code(错误码): ${t.getErrCode}, error message(错误信息): ${t.getDesc}."
        case _ => s"error code(错误码): ${error.getErrCode}, error message(错误信息): ${error.getDesc}."
      }
      log.error(errorMsg, error)
      val message = Message.error(errorMsg)
      message.data(EXCEPTION_MSG, error.toMap)
      message
    case warn: WarnException =>
      val warnMsg = s"Warning code(警告码): ${warn.getErrCode}, Warning message(警告信息): ${warn.getDesc}."
      log.warn(warnMsg, warn)
      val message = Message.warn(warnMsg)
      message.data(EXCEPTION_MSG, warn.toMap)
      message
    case t =>
      log.error(msg, t)
      val errorMsg = ExceptionUtils.getRootCauseMessage(t)
      val message = if (StringUtils.isNotEmpty(errorMsg) && "operation failed(操作失败)" != msg)
        error(msg + "!the reason(原因):" + errorMsg)
      else if (StringUtils.isNotEmpty(errorMsg)) error(errorMsg)
      else error(msg)
      message.data(EXCEPTION_MSG, ExceptionManager.unknownException(message.getMessage))
  }

  def catchIt(tryOp: => Message)(implicit log: Logger): Message = catchMsg(tryOp)("operation failed(操作失败)s")

  implicit def toScalaBuffer[T](list: util.List[T]): mutable.Buffer[T] = JavaConversions.asScalaBuffer(list)
  implicit def toScalaMap[K, V](map: util.Map[K, V]): mutable.Map[K, V] = JavaConversions.mapAsScalaMap(map)

  implicit def toJavaList[T](list: mutable.Buffer[T]): util.List[T] = {
    val arrayList = new util.ArrayList[T]
    list.foreach(arrayList.add)
    arrayList
  }

  implicit def toJavaMap[K, V](map: mutable.Map[K, V]): JMap[K, V] = {
    val hashMap = new util.HashMap[K, V]()
    map.foreach(m => hashMap.put(m._1, m._2))
    hashMap
  }

  implicit def toJavaMap[K, V](map: Map[K, V]): JMap[K, V] = {
    val hashMap = new util.HashMap[K, V]()
    map.foreach(m => hashMap.put(m._1, m._2))
    hashMap
  }

  implicit def asString(mapWithKey: (util.Map[String, Object], String)): String =
    mapWithKey._1.get(mapWithKey._2).asInstanceOf[String]

  implicit def getString(mapWithKey: (util.Map[String, String], String)): String =
    mapWithKey._1.get(mapWithKey._2)

  implicit def asInt(map: util.Map[String, Object], key: String): Int = map.get(key).asInstanceOf[Int]

  implicit def asBoolean(mapWithKey: (util.Map[String, Object], String)): Boolean =
    mapWithKey._1.get(mapWithKey._2).asInstanceOf[Boolean]
}
Example 14
Source File: TokenUtil.scala From meteorite-core with Apache License 2.0
package bi.meteorite.core.security.tokenprovider

import javax.servlet.http.HttpServletRequest

import bi.meteorite.core.api.security.tokenprovider.TokenProvider

// NOTE: the enclosing object declaration (which also defines the AUTHORIZATION_HEADER_AMDATU
// constant used below) was elided in the original listing.
  def getTokenFromRequest(request: HttpServletRequest): String = {
    if (request.getCookies != null) {
      for (cookie <- request.getCookies) {
        if (TokenProvider.TOKEN_COOKIE_NAME == cookie.getName) {
          return cookie.getValue
        }
      }
    }
    val authHeader: String = request.getHeader("Authorization")
    if (authHeader != null && authHeader.startsWith(AUTHORIZATION_HEADER_AMDATU + " ")) {
      return authHeader.substring(AUTHORIZATION_HEADER_AMDATU.length + 1)
    }
    null
  }
}

final class TokenUtil {
}
Example 15
Source File: SecurityFilter.scala From meteorite-core with Apache License 2.0
package bi.meteorite.core.security

import javax.inject.{Inject, Named, Singleton}
import javax.servlet.http.{HttpServletRequest, HttpServletResponse}
import javax.servlet.{Filter, FilterChain, FilterConfig, ServletRequest, ServletResponse}

import bi.meteorite.core.api.security.AdminLoginService
import bi.meteorite.core.api.security.exceptions.TokenProviderException
import bi.meteorite.core.api.security.tokenprovider.TokenProvider
import org.ops4j.pax.cdi.api.OsgiService

@Singleton
@Named("securityFilter")
class SecurityFilter extends Filter {

  @Inject @OsgiService
  private var tokenProvider: TokenProvider = _

  @Inject @OsgiService
  private var adminLoginService: AdminLoginService = _

  override def doFilter(request: ServletRequest, response: ServletResponse, chain: FilterChain) {
    val servletRequest = request.asInstanceOf[HttpServletRequest]
    val servletResponse = response.asInstanceOf[HttpServletResponse]

    var s = servletRequest.getPathInfo
    val s2 = servletRequest.getServletPath

    if (s != null && s2 != null) {
      s = s2 + s
    } else if (s == null) {
      s = s2
    }

    if (s.startsWith("/admin/ui/") || s == "/rest/core/auth/login") {
      chain.doFilter(request, response)
    } else if (servletRequest.getPathInfo.startsWith("/rest/core/admin")) {
      val token = tokenProvider.getTokenFromRequest(servletRequest)
      var isAdmin = false
      try {
        val userDetails = tokenProvider.verifyToken(token)
        if (adminLoginService.getUsername == userDetails.get(TokenProvider.USERNAME).get) {
          isAdmin = true
        }
      } catch {
        case e: TokenProviderException =>
      }
      if (isAdmin) {
        chain.doFilter(request, response)
      } else {
        servletResponse.sendRedirect("/admin/ui/index.html")
      }
    } else {
      chain.doFilter(request, response)
    }
  }

  override def init(arg0: FilterConfig) {
  }

  override def destroy() {
  }

  def setTokenProvider(tokenProvider: TokenProvider) = this.tokenProvider = tokenProvider

  def setAdminLoginService(adminLoginService: AdminLoginService) = this.adminLoginService = adminLoginService
}
Example 16
Source File: ExecutorNumTab.scala From XSQL with Apache License 2.0
package org.apache.spark.monitor

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{SparkUI, SparkUITab, UIUtils, WebUIPage}

private class ExecutorNumTab(parent: SparkUI) extends SparkUITab(parent, "resources") {

  init()

  private def init(): Unit = {
    attachPage(new ExecutorNumPage(this))
  }
}

private class ExecutorNumPage(parent: SparkUITab) extends WebUIPage("") {

  def render(request: HttpServletRequest): Seq[Node] = {
    val content =
      <div>
        {
          <div id ="echart-container" class="row-fluid" style="height: 600px"></div> ++
          <script type="text/javascript"
                  src="http://echarts.baidu.com/gallery/vendors/echarts/echarts.min.js"></script> ++
          <script src={UIUtils.prependBaseUri(request, "/static/special/executornumpage.js")}></script>
        }
      </div>
    UIUtils.headerSparkPage(request, "ExecutorNumCurve", content, parent, useDataTables = false)
  }
}
Example 17
Source File: FileDownloadServlet.scala From udash-core with Apache License 2.0
package io.udash.rpc.utils

import java.io.File
import java.nio.file.Files
import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}

// NOTE: the abstract class declaration (extending HttpServlet) and its abstract members
// resolveFile and presentedFileName were elided in the original listing.
  protected def resolveFileMimeType(file: File): String =
    Option(getServletContext.getMimeType(file.getAbsolutePath)).getOrElse("application/octet-stream")

  override def doGet(request: HttpServletRequest, response: HttpServletResponse): Unit = {
    val file = resolveFile(request)
    if (!file.exists()) response.sendError(404, "File not found!")
    else {
      // MIME type
      response.setContentType(resolveFileMimeType(file))
      // content length
      response.setContentLengthLong(file.length)
      // file name
      response.setHeader("Content-Disposition", s"""attachment; filename="${presentedFileName(file.getName)}"""")

      val outStream = response.getOutputStream
      Files.copy(file.toPath, outStream)
      outStream.close()
    }
  }
}
Example 18
Source File: FileUploadServlet.scala From udash-core with Apache License 2.0
package io.udash.rpc.utils

import java.io.InputStream
import java.nio.file.Paths
import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}

import com.avsystem.commons._

// NOTE: the abstract class declaration (extending HttpServlet) and its fileFields member
// were elided in the original listing.
  protected def handleFile(name: String, content: InputStream): Unit

  override protected def doPost(request: HttpServletRequest, response: HttpServletResponse): Unit = {
    request.getParts.asScala
      .filter(part => fileFields.contains(part.getName))
      .foreach(filePart => {
        val fileName = Paths.get(filePart.getSubmittedFileName).getFileName.toString
        val fileContent = filePart.getInputStream
        handleFile(fileName, fileContent)
        fileContent.close()
      })
  }
}
Example 19
Source File: OpenApiServlet.scala From udash-core with Apache License 2.0
package io.udash
package rest.openapi

import com.avsystem.commons.OptArg
import com.avsystem.commons.annotation.explicitGenerics
import com.avsystem.commons.serialization.json.JsonStringOutput
import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}

object OpenApiServlet {
  @explicitGenerics
  def apply[RestApi: OpenApiMetadata](
    info: Info,
    components: Components = Components(),
    servers: List[Server] = Nil,
    security: List[SecurityRequirement] = Nil,
    tags: List[Tag] = Nil,
    externalDocs: OptArg[ExternalDocumentation] = OptArg.Empty
  ): OpenApiServlet = new OpenApiServlet {
    protected def render(request: HttpServletRequest): OpenApi =
      implicitly[OpenApiMetadata[RestApi]].openapi(info, components, servers, security, tags, externalDocs)
  }
}

abstract class OpenApiServlet extends HttpServlet {
  protected def render(request: HttpServletRequest): OpenApi

  override def doGet(req: HttpServletRequest, resp: HttpServletResponse): Unit = {
    resp.setContentType("application/json;charset=utf-8")
    resp.getWriter.write(JsonStringOutput.writePretty(render(req)))
  }
} // closing brace restored; it was truncated in the original listing
Example 20
Source File: HttpStreamServer.scala From spark-http-stream with BSD 2-Clause "Simplified" License
package org.apache.spark.sql.execution.streaming.http

import org.apache.spark.internal.Logging
import org.eclipse.jetty.server.Server
import org.eclipse.jetty.servlet.ServletContextHandler
import javax.servlet.ServletConfig
import javax.servlet.http.HttpServlet
import javax.servlet.http.HttpServletRequest
import javax.servlet.http.HttpServletResponse
import org.eclipse.jetty.servlet.ServletHolder
import scala.collection.JavaConversions

class MissingRequiredRequestParameterException(paramName: String)
  extends RuntimeException(s"missing required request parameter: $paramName") {
}

class UnsupportedActionException(action: String)
  extends RuntimeException(s"unsupported action in HTTP request header: $action") {
}

object HttpStreamServer {
  def start(httpServletPath: String, httpPort: Int) = {
    val server = new HttpStreamServer(httpServletPath, httpPort);
    server.start;
    server;
  }
}

// NOTE: the HttpStreamServer class declaration (with its server, httpStreamServlet and
// actionsHandler members) was elided in the original listing; the methods below belong to it.
  def withActionsHandler[T <: ActionsHandler](actionsHandler: T): T = {
    this.actionsHandler = actionsHandler;
    actionsHandler;
  }

  def withBuffer(): MemoryBufferAsReceiver = {
    withActionsHandler(new MemoryBufferAsReceiver());
  }

  def withKafka(bootstrapServers: String): KafkaAsReceiver = {
    withActionsHandler(new KafkaAsReceiver(bootstrapServers));
  }

  def stop() = {
    httpStreamServlet.destroy();
    if (server != null)
      server.stop();
  }
}
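A usage sketch based only on the API visible above (the servlet path and port are arbitrary):

// Start an HTTP stream server on port 8080 serving at /xxxx, backed by an in-memory buffer.
val server = HttpStreamServer.start("/xxxx", 8080)
val receiver = server.withBuffer()
// ... later, shut the server down:
server.stop()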
Example 21
Source File: FiberCacheManagerPage.scala From OAP with Apache License 2.0
package org.apache.spark.sql.oap.ui

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.internal.Logging
import org.apache.spark.sql.execution.datasources.oap.filecache.CacheStats
import org.apache.spark.sql.oap.OapRuntime
import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class FiberCacheManagerPage(parent: OapTab) extends WebUIPage("") with Logging {
  def render(request: HttpServletRequest): Seq[Node] = {
    val content =
      <div>
        {
          <div id="active-cms"></div> ++
          <script src={UIUtils.prependBaseUri(request, parent.basePath, "/static/utils.js")}></script> ++
          <script src={UIUtils.prependBaseUri(request, parent.basePath, "/static/oap/oap.js")}></script>
        }
      </div>
    UIUtils.headerSparkPage(request, "FiberCacheManager", content, parent, useDataTables = true)
  }
}

class FiberCacheManagerSummary private[spark](
    val id: String,
    val hostPort: String,
    val isActive: Boolean,
    val indexDataCacheSeparationEnable: Boolean,
    val memoryUsed: Long,
    val maxMemory: Long,
    val cacheSize: Long,
    val cacheCount: Long,
    val backendCacheSize: Long,
    val backendCacheCount: Long,
    val dataFiberSize: Long,
    val dataFiberCount: Long,
    val indexFiberSize: Long,
    val indexFiberCount: Long,
    val pendingFiberSize: Long,
    val pendingFiberCount: Long,
    val dataFiberHitCount: Long,
    val dataFiberMissCount: Long,
    val dataFiberLoadCount: Long,
    val dataTotalLoadTime: Long,
    val dataEvictionCount: Long,
    val indexFiberHitCount: Long,
    val indexFiberMissCount: Long,
    val indexFiberLoadCount: Long,
    val indexTotalLoadTime: Long,
    val indexEvictionCount: Long)
Example 22
Source File: AccessFilter.scala From incubator-livy with Apache License 2.0
package org.apache.livy.server

import javax.servlet._
import javax.servlet.http.{HttpServletRequest, HttpServletResponse}

private[livy] class AccessFilter(accessManager: AccessManager) extends Filter {

  override def init(filterConfig: FilterConfig): Unit = {}

  override def doFilter(request: ServletRequest,
                        response: ServletResponse,
                        chain: FilterChain): Unit = {
    val httpRequest = request.asInstanceOf[HttpServletRequest]
    val remoteUser = httpRequest.getRemoteUser
    if (accessManager.isUserAllowed(remoteUser)) {
      chain.doFilter(request, response)
    } else {
      val httpServletResponse = response.asInstanceOf[HttpServletResponse]
      httpServletResponse.sendError(HttpServletResponse.SC_FORBIDDEN,
        "User not authorised to use Livy.")
    }
  }

  override def destroy(): Unit = {}
}
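A wiring sketch (not from Livy itself) showing how such a filter might be attached to an embedded Jetty context so every request passes through it; the accessManager value is assumed to be in scope:

import java.util.EnumSet
import javax.servlet.DispatcherType
import org.eclipse.jetty.servlet.{FilterHolder, ServletContextHandler}

val context = new ServletContextHandler()
// Apply the filter to all paths and all dispatch types.
context.addFilter(new FilterHolder(new AccessFilter(accessManager)), "/*",
  EnumSet.allOf(classOf[DispatcherType]))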
Example 23
Source File: CsrfFilter.scala From incubator-livy with Apache License 2.0
package org.apache.livy.server

import javax.servlet._
import javax.servlet.http.{HttpServletRequest, HttpServletResponse}

class CsrfFilter extends Filter {

  val METHODS_TO_IGNORE = Set("GET", "OPTIONS", "HEAD");

  val HEADER_NAME = "X-Requested-By";

  override def init(filterConfig: FilterConfig): Unit = {}

  override def doFilter(request: ServletRequest,
                        response: ServletResponse,
                        chain: FilterChain): Unit = {
    val httpRequest = request.asInstanceOf[HttpServletRequest]
    if (!METHODS_TO_IGNORE.contains(httpRequest.getMethod)
      && httpRequest.getHeader(HEADER_NAME) == null) {
      response.asInstanceOf[HttpServletResponse].sendError(HttpServletResponse.SC_BAD_REQUEST,
        "Missing Required Header for CSRF protection.")
    } else {
      chain.doFilter(request, response)
    }
  }

  override def destroy(): Unit = {}
}
Example 24
Source File: BatchSessionServlet.scala From incubator-livy with Apache License 2.0
package org.apache.livy.server.batch

import javax.servlet.http.HttpServletRequest

import org.apache.livy.LivyConf
import org.apache.livy.server.{AccessManager, SessionServlet}
import org.apache.livy.server.recovery.SessionStore
import org.apache.livy.sessions.BatchSessionManager
import org.apache.livy.utils.AppInfo

case class BatchSessionView(
  id: Long,
  name: Option[String],
  owner: String,
  proxyUser: Option[String],
  state: String,
  appId: Option[String],
  appInfo: AppInfo,
  log: Seq[String])

class BatchSessionServlet(
    sessionManager: BatchSessionManager,
    sessionStore: SessionStore,
    livyConf: LivyConf,
    accessManager: AccessManager)
  extends SessionServlet(sessionManager, livyConf, accessManager) {

  override protected def createSession(req: HttpServletRequest): BatchSession = {
    val createRequest = bodyAs[CreateBatchRequest](req)
    val sessionId = sessionManager.nextId()
    val sessionName = createRequest.name
    BatchSession.create(
      sessionId, sessionName, createRequest, livyConf, accessManager,
      remoteUser(req), proxyUser(req, createRequest.proxyUser), sessionStore)
  }

  override protected[batch] def clientSessionView(
      session: BatchSession,
      req: HttpServletRequest): Any = {
    val logs =
      if (accessManager.hasViewAccess(session.owner,
                                      effectiveUser(req),
                                      session.proxyUser.getOrElse(""))) {
        val lines = session.logLines()

        val size = 10
        val from = math.max(0, lines.length - size)
        val until = from + size

        lines.view(from, until).toSeq
      } else {
        Nil
      }
    BatchSessionView(session.id, session.name, session.owner, session.proxyUser,
      session.state.toString, session.appId, session.appInfo, logs)
  }
}
Example 25
Source File: MetricsServlet.scala From sparkoscope with Apache License 2.0
package org.apache.spark.metrics.sink

import java.util.Properties
import java.util.concurrent.TimeUnit
import javax.servlet.http.HttpServletRequest

import com.codahale.metrics.MetricRegistry
import com.codahale.metrics.json.MetricsModule
import com.fasterxml.jackson.databind.ObjectMapper
import org.eclipse.jetty.servlet.ServletContextHandler

import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.ui.JettyUtils._

private[spark] class MetricsServlet(
    val property: Properties,
    val registry: MetricRegistry,
    securityMgr: SecurityManager)
  extends Sink {

  val SERVLET_KEY_PATH = "path"
  val SERVLET_KEY_SAMPLE = "sample"

  val SERVLET_DEFAULT_SAMPLE = false

  val servletPath = property.getProperty(SERVLET_KEY_PATH)

  val servletShowSample = Option(property.getProperty(SERVLET_KEY_SAMPLE)).map(_.toBoolean)
    .getOrElse(SERVLET_DEFAULT_SAMPLE)

  val mapper = new ObjectMapper().registerModule(
    new MetricsModule(TimeUnit.SECONDS, TimeUnit.MILLISECONDS, servletShowSample))

  def getHandlers(conf: SparkConf): Array[ServletContextHandler] = {
    Array[ServletContextHandler](
      createServletHandler(servletPath,
        new ServletParams(request => getMetricsSnapshot(request), "text/json"), securityMgr, conf)
    )
  }

  def getMetricsSnapshot(request: HttpServletRequest): String = {
    mapper.writeValueAsString(registry)
  }

  override def start() { }

  override def stop() { }

  override def report() { }
}
Example 26
Source File: WorkerWebUI.scala From sparkoscope with Apache License 2.0
package org.apache.spark.deploy.worker.ui

import java.io.File
import javax.servlet.http.HttpServletRequest

import org.apache.spark.deploy.worker.Worker
import org.apache.spark.internal.Logging
import org.apache.spark.ui.{SparkUI, WebUI}
import org.apache.spark.ui.JettyUtils._
import org.apache.spark.util.RpcUtils

// NOTE: the enclosing WorkerWebUI class declaration was elided in the original listing;
// the method below belongs to it.
  def initialize() {
    val logPage = new LogPage(this)
    attachPage(logPage)
    attachPage(new WorkerPage(this))
    attachHandler(createStaticHandler(WorkerWebUI.STATIC_RESOURCE_BASE, "/static"))
    attachHandler(createServletHandler("/log",
      (request: HttpServletRequest) => logPage.renderLog(request),
      worker.securityMgr,
      worker.conf))
  }
}

private[worker] object WorkerWebUI {
  val STATIC_RESOURCE_BASE = SparkUI.STATIC_RESOURCE_DIR
  val DEFAULT_RETAINED_DRIVERS = 1000
  val DEFAULT_RETAINED_EXECUTORS = 1000
}
Example 27
Source File: HistoryPage.scala From sparkoscope with Apache License 2.0
package org.apache.spark.deploy.history

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[history] class HistoryPage(parent: HistoryServer) extends WebUIPage("") {

  def render(request: HttpServletRequest): Seq[Node] = {
    val requestedIncomplete =
      Option(request.getParameter("showIncomplete")).getOrElse("false").toBoolean

    val allAppsSize = parent.getApplicationList().count(_.completed != requestedIncomplete)
    val eventLogsUnderProcessCount = parent.getEventLogsUnderProcess()
    val lastUpdatedTime = parent.getLastUpdatedTime()
    val providerConfig = parent.getProviderConfig()
    val content =
      <script src={UIUtils.prependBaseUri("/static/historypage-common.js")}></script>
      <div>
        <div class="span12">
          <ul class="unstyled">
            {providerConfig.map { case (k, v) => <li><strong>{k}:</strong> {v}</li> }}
          </ul>
          {
            if (eventLogsUnderProcessCount > 0) {
              <p>There are {eventLogsUnderProcessCount} event log(s) currently being
                processed which may result in additional applications getting listed on this page.
                Refresh the page to view updates. </p>
            }
          }
          {
            if (lastUpdatedTime > 0) {
              <p>Last updated: <span id="last-updated">{lastUpdatedTime}</span></p>
            }
          }
          {
            if (allAppsSize > 0) {
              <script src={UIUtils.prependBaseUri("/static/dataTables.rowsGroup.js")}></script> ++
                <div id="history-summary" class="span12 pagination"></div> ++
                <script src={UIUtils.prependBaseUri("/static/utils.js")}></script> ++
                <script src={UIUtils.prependBaseUri("/static/historypage.js")}></script> ++
                <script>setAppLimit({parent.maxApplications})</script>
            } else if (requestedIncomplete) {
              <h4>No incomplete applications found!</h4>
            } else if (eventLogsUnderProcessCount > 0) {
              <h4>No completed applications found!</h4>
            } else {
              <h4>No completed applications found!</h4> ++ parent.emptyListingHtml
            }
          }
          <a href={makePageLink(!requestedIncomplete)}>
            {
              if (requestedIncomplete) {
                "Back to completed applications"
              } else {
                "Show incomplete applications"
              }
            }
          </a>
        </div>
      </div>
    UIUtils.basicSparkPage(content, "History Server", true)
  }

  private def makePageLink(showIncomplete: Boolean): String = {
    UIUtils.prependBaseUri("/?" + "showIncomplete=" + showIncomplete)
  }
}
Example 28
Source File: ExecutorsPage.scala From sparkoscope with Apache License 2.0
package org.apache.spark.ui.exec

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.status.api.v1.ExecutorSummary
import org.apache.spark.ui.{UIUtils, WebUIPage}

// This isn't even used anymore -- but we need to keep it b/c of a MiMa false positive
private[ui] case class ExecutorSummaryInfo(
    id: String,
    hostPort: String,
    rddBlocks: Int,
    memoryUsed: Long,
    diskUsed: Long,
    activeTasks: Int,
    failedTasks: Int,
    completedTasks: Int,
    totalTasks: Int,
    totalDuration: Long,
    totalInputBytes: Long,
    totalShuffleRead: Long,
    totalShuffleWrite: Long,
    maxMemory: Long,
    executorLogs: Map[String, String])

private[ui] class ExecutorsPage(
    parent: ExecutorsTab,
    threadDumpEnabled: Boolean)
  extends WebUIPage("") {
  private val listener = parent.listener

  def render(request: HttpServletRequest): Seq[Node] = {
    val content =
      <div>
        {
          <div id="active-executors"></div> ++
          <script src={UIUtils.prependBaseUri("/static/utils.js")}></script> ++
          <script src={UIUtils.prependBaseUri("/static/executorspage.js")}></script> ++
          <script>setThreadDumpEnabled({threadDumpEnabled})</script>
        }
      </div>;

    UIUtils.headerSparkPage("Executors", content, parent, useDataTables = true)
  }
}

private[spark] object ExecutorsPage {
  def getExecInfo(
      listener: ExecutorsListener,
      statusId: Int,
      isActive: Boolean): ExecutorSummary = {
    val status = if (isActive) {
      listener.activeStorageStatusList(statusId)
    } else {
      listener.deadStorageStatusList(statusId)
    }
    val execId = status.blockManagerId.executorId
    val hostPort = status.blockManagerId.hostPort
    val rddBlocks = status.numBlocks
    val memUsed = status.memUsed
    val maxMem = status.maxMem
    val diskUsed = status.diskUsed
    val taskSummary = listener.executorToTaskSummary.getOrElse(execId, ExecutorTaskSummary(execId))

    new ExecutorSummary(
      execId,
      hostPort,
      isActive,
      rddBlocks,
      memUsed,
      diskUsed,
      taskSummary.totalCores,
      taskSummary.tasksMax,
      taskSummary.tasksActive,
      taskSummary.tasksFailed,
      taskSummary.tasksComplete,
      taskSummary.tasksActive + taskSummary.tasksFailed + taskSummary.tasksComplete,
      taskSummary.duration,
      taskSummary.jvmGCTime,
      taskSummary.inputBytes,
      taskSummary.shuffleRead,
      taskSummary.shuffleWrite,
      maxMem,
      taskSummary.executorLogs
    )
  }
}
Example 29
Source File: ExecutorThreadDumpPage.scala From sparkoscope with Apache License 2.0
package org.apache.spark.ui.exec

import javax.servlet.http.HttpServletRequest

import scala.xml.{Node, Text}

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class ExecutorThreadDumpPage(parent: ExecutorsTab) extends WebUIPage("threadDump") {

  private val sc = parent.sc

  def render(request: HttpServletRequest): Seq[Node] = {
    val executorId = Option(request.getParameter("executorId")).map { executorId =>
      UIUtils.decodeURLParameter(executorId)
    }.getOrElse {
      throw new IllegalArgumentException(s"Missing executorId parameter")
    }
    val time = System.currentTimeMillis()
    val maybeThreadDump = sc.get.getExecutorThreadDump(executorId)

    val content = maybeThreadDump.map { threadDump =>
      val dumpRows = threadDump.sortWith {
        case (threadTrace1, threadTrace2) =>
          val v1 = if (threadTrace1.threadName.contains("Executor task launch")) 1 else 0
          val v2 = if (threadTrace2.threadName.contains("Executor task launch")) 1 else 0
          if (v1 == v2) {
            threadTrace1.threadName.toLowerCase < threadTrace2.threadName.toLowerCase
          } else {
            v1 > v2
          }
      }.map { thread =>
        val threadId = thread.threadId
        val blockedBy = thread.blockedByThreadId match {
          case Some(blockedByThreadId) =>
            <div>
              Blocked by <a href={s"#${thread.blockedByThreadId}_td_id"}>
              Thread {thread.blockedByThreadId} {thread.blockedByLock}</a>
            </div>
          case None => Text("")
        }
        val heldLocks = thread.holdingLocks.mkString(", ")

        <tr id={s"thread_${threadId}_tr"} class="accordion-heading"
            onclick={s"toggleThreadStackTrace($threadId, false)"}
            onmouseover={s"onMouseOverAndOut($threadId)"}
            onmouseout={s"onMouseOverAndOut($threadId)"}>
          <td id={s"${threadId}_td_id"}>{threadId}</td>
          <td id={s"${threadId}_td_name"}>{thread.threadName}</td>
          <td id={s"${threadId}_td_state"}>{thread.threadState}</td>
          <td id={s"${threadId}_td_locking"}>{blockedBy}{heldLocks}</td>
          <td id={s"${threadId}_td_stacktrace"} class="hidden">{thread.stackTrace}</td>
        </tr>
      }

      <div class="row-fluid">
        <p>Updated at {UIUtils.formatDate(time)}</p>
        {
          // scalastyle:off
          <p><a class="expandbutton" onClick="expandAllThreadStackTrace(true)">
            Expand All
          </a></p>
          <p><a class="expandbutton hidden" onClick="collapseAllThreadStackTrace(true)">
            Collapse All
          </a></p>
          <div class="form-inline">
            <div class="bs-example" data-example-id="simple-form-inline">
              <div class="form-group">
                <div class="input-group">
                  Search: <input type="text" class="form-control" id="search" oninput="onSearchStringChange()"></input>
                </div>
              </div>
            </div>
          </div>
          <p></p>
          // scalastyle:on
        }
        <table class={UIUtils.TABLE_CLASS_STRIPED + " accordion-group" + " sortable"}>
          <thead>
            <th onClick="collapseAllThreadStackTrace(false)">Thread ID</th>
            <th onClick="collapseAllThreadStackTrace(false)">Thread Name</th>
            <th onClick="collapseAllThreadStackTrace(false)">Thread State</th>
            <th onClick="collapseAllThreadStackTrace(false)">Thread Locks</th>
          </thead>
          <tbody>{dumpRows}</tbody>
        </table>
      </div>
    }.getOrElse(Text("Error fetching thread dump"))
    UIUtils.headerSparkPage(s"Thread dump for executor $executorId", content, parent)
  }
}
Example 30
Source File: EnvironmentPage.scala From sparkoscope with Apache License 2.0
package org.apache.spark.ui.env

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class EnvironmentPage(parent: EnvironmentTab) extends WebUIPage("") {
  private val listener = parent.listener

  private def removePass(kv: (String, String)): (String, String) = {
    if (kv._1.toLowerCase.contains("password") || kv._1.toLowerCase.contains("secret")) {
      (kv._1, "******")
    } else kv
  }

  def render(request: HttpServletRequest): Seq[Node] = {
    val runtimeInformationTable = UIUtils.listingTable(
      propertyHeader, jvmRow, listener.jvmInformation, fixedWidth = true)
    val sparkPropertiesTable = UIUtils.listingTable(propertyHeader, propertyRow,
      listener.sparkProperties.map(removePass), fixedWidth = true)
    val systemPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.systemProperties, fixedWidth = true)
    val classpathEntriesTable = UIUtils.listingTable(
      classPathHeaders, classPathRow, listener.classpathEntries, fixedWidth = true)
    val content =
      <span>
        <h4>Runtime Information</h4> {runtimeInformationTable}
        <h4>Spark Properties</h4> {sparkPropertiesTable}
        <h4>System Properties</h4> {systemPropertiesTable}
        <h4>Classpath Entries</h4> {classpathEntriesTable}
      </span>

    UIUtils.headerSparkPage("Environment", content, parent)
  }

  private def propertyHeader = Seq("Name", "Value")
  private def classPathHeaders = Seq("Resource", "Source")
  private def jvmRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr>
}
Example 31
Source File: StagesTab.scala From sparkoscope with Apache License 2.0
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import org.apache.spark.scheduler.SchedulingMode
import org.apache.spark.ui.{SparkUI, SparkUITab}

private[ui] class StagesTab(parent: SparkUI) extends SparkUITab(parent, "stages") {
  val sc = parent.sc
  val conf = parent.conf
  val killEnabled = parent.killEnabled
  val progressListener = parent.jobProgressListener
  val operationGraphListener = parent.operationGraphListener
  val executorsListener = parent.executorsListener

  attachPage(new AllStagesPage(this))
  attachPage(new StagePage(this))
  attachPage(new PoolPage(this))

  def isFairScheduler: Boolean = progressListener.schedulingMode == Some(SchedulingMode.FAIR)

  def handleKillRequest(request: HttpServletRequest): Unit = {
    if (killEnabled && parent.securityManager.checkModifyPermissions(request.getRemoteUser)) {
      val stageId = Option(request.getParameter("id")).map(_.toInt)
      stageId.foreach { id =>
        if (progressListener.activeStages.contains(id)) {
          sc.foreach(_.cancelStage(id))
          // Do a quick pause here to give Spark time to kill the stage so it shows up as
          // killed after the refresh. Note that this will block the serving thread so the
          // time should be limited in duration.
          Thread.sleep(100)
        }
      }
    }
  }
}
Example 32
Source File: JobsTab.scala From sparkoscope with Apache License 2.0
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import org.apache.spark.scheduler.SchedulingMode
import org.apache.spark.ui.{SparkUI, SparkUITab}

private[ui] class JobsTab(parent: SparkUI) extends SparkUITab(parent, "jobs") {
  val sc = parent.sc
  val killEnabled = parent.killEnabled
  val jobProgresslistener = parent.jobProgressListener
  val executorListener = parent.executorsListener
  val operationGraphListener = parent.operationGraphListener
  val hdfsExecutorMetricsListener = parent.hDFSExecutorMetricsListener

  def isFairScheduler: Boolean =
    jobProgresslistener.schedulingMode == Some(SchedulingMode.FAIR)

  def getSparkUser: String = parent.getSparkUser

  attachPage(new AllJobsPage(this))
  attachPage(new JobPage(this))

  def handleKillRequest(request: HttpServletRequest): Unit = {
    if (killEnabled && parent.securityManager.checkModifyPermissions(request.getRemoteUser)) {
      val jobId = Option(request.getParameter("id")).map(_.toInt)
      jobId.foreach { id =>
        if (jobProgresslistener.activeJobs.contains(id)) {
          sc.foreach(_.cancelJob(id))
          // Do a quick pause here to give Spark time to kill the job so it shows up as
          // killed after the refresh. Note that this will block the serving thread so the
          // time should be limited in duration.
          Thread.sleep(100)
        }
      }
    }
  }
}
Example 33
Source File: PoolPage.scala From sparkoscope with Apache License 2.0
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.scheduler.StageInfo
import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class PoolPage(parent: StagesTab) extends WebUIPage("pool") {
  private val sc = parent.sc
  private val listener = parent.progressListener

  def render(request: HttpServletRequest): Seq[Node] = {
    listener.synchronized {
      val poolName = Option(request.getParameter("poolname")).map { poolname =>
        UIUtils.decodeURLParameter(poolname)
      }.getOrElse {
        throw new IllegalArgumentException(s"Missing poolname parameter")
      }

      val poolToActiveStages = listener.poolToActiveStages
      val activeStages = poolToActiveStages.get(poolName) match {
        case Some(s) => s.values.toSeq
        case None => Seq[StageInfo]()
      }
      val shouldShowActiveStages = activeStages.nonEmpty
      val activeStagesTable =
        new StageTableBase(request, activeStages, "", "activeStage", parent.basePath, "stages/pool",
          parent.progressListener, parent.isFairScheduler, parent.killEnabled,
          isFailedStage = false)

      // For now, pool information is only accessible in live UIs
      val pools = sc.map(_.getPoolForName(poolName).getOrElse {
        throw new IllegalArgumentException(s"Unknown poolname: $poolName")
      }).toSeq
      val poolTable = new PoolTable(pools, parent)

      var content = <h4>Summary </h4> ++ poolTable.toNodeSeq
      if (shouldShowActiveStages) {
        content ++= <h4>{activeStages.size} Active Stages</h4> ++ activeStagesTable.toNodeSeq
      }

      UIUtils.headerSparkPage("Fair Scheduler Pool: " + poolName, content, parent)
    }
  }
}
Example 34
Source File: StagePageSuite.scala From sparkoscope with Apache License 2.0
package org.apache.spark.ui

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.mockito.Mockito.{mock, when, RETURNS_SMART_NULLS}

import org.apache.spark._
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler._
import org.apache.spark.storage.StorageStatusListener
import org.apache.spark.ui.exec.ExecutorsListener
import org.apache.spark.ui.jobs.{JobProgressListener, StagePage, StagesTab}
import org.apache.spark.ui.scope.RDDOperationGraphListener

class StagePageSuite extends SparkFunSuite with LocalSparkContext {

  private val peakExecutionMemory = 10

  test("peak execution memory should displayed") {
    val conf = new SparkConf(false)
    val html = renderStagePage(conf).toString().toLowerCase
    val targetString = "peak execution memory"
    assert(html.contains(targetString))
  }

  test("SPARK-10543: peak execution memory should be per-task rather than cumulative") {
    val conf = new SparkConf(false)
    val html = renderStagePage(conf).toString().toLowerCase
    // verify min/25/50/75/max show task value not cumulative values
    assert(html.contains(s"<td>$peakExecutionMemory.0 b</td>" * 5))
  }

  private def renderStagePage(conf: SparkConf): Seq[Node] = {
    val jobListener = new JobProgressListener(conf)
    val graphListener = new RDDOperationGraphListener(conf)
    val executorsListener = new ExecutorsListener(new StorageStatusListener(conf), conf)
    val tab = mock(classOf[StagesTab], RETURNS_SMART_NULLS)
    val request = mock(classOf[HttpServletRequest])
    when(tab.conf).thenReturn(conf)
    when(tab.progressListener).thenReturn(jobListener)
    when(tab.operationGraphListener).thenReturn(graphListener)
    when(tab.executorsListener).thenReturn(executorsListener)
    when(tab.appName).thenReturn("testing")
    when(tab.headerTabs).thenReturn(Seq.empty)
    when(request.getParameter("id")).thenReturn("0")
    when(request.getParameter("attempt")).thenReturn("0")
    val page = new StagePage(tab)

    // Simulate a stage in job progress listener
    val stageInfo = new StageInfo(0, 0, "dummy", 1, Seq.empty, Seq.empty, "details")
    // Simulate two tasks to test PEAK_EXECUTION_MEMORY correctness
    (1 to 2).foreach { taskId =>
      val taskInfo = new TaskInfo(taskId, taskId, 0, 0, "0", "localhost", TaskLocality.ANY, false)
      jobListener.onStageSubmitted(SparkListenerStageSubmitted(stageInfo))
      jobListener.onTaskStart(SparkListenerTaskStart(0, 0, taskInfo))
      taskInfo.markFinished(TaskState.FINISHED)
      val taskMetrics = TaskMetrics.empty
      taskMetrics.incPeakExecutionMemory(peakExecutionMemory)
      jobListener.onTaskEnd(
        SparkListenerTaskEnd(0, 0, "result", Success, taskInfo, taskMetrics))
    }
    jobListener.onStageCompleted(SparkListenerStageCompleted(stageInfo))
    page.render(request)
  }
}
Example 35
Source File: DemoFileDownloadServlet.scala From udash-demos with GNU General Public License v3.0 | 5 votes |
package io.udash.demos.files.jetty

import java.io.File
import java.net.URLDecoder
import java.nio.charset.StandardCharsets
import javax.servlet.http.HttpServletRequest

import io.udash.demos.files.services.FilesStorage
import io.udash.rpc.utils.FileDownloadServlet

class DemoFileDownloadServlet(filesDir: String, contextPrefix: String) extends FileDownloadServlet {
  override protected def resolveFile(request: HttpServletRequest): File = {
    val name = URLDecoder.decode(
      request.getRequestURI.stripPrefix(contextPrefix + "/"), StandardCharsets.UTF_8.name())
    new File(filesDir, name)
  }

  override protected def presentedFileName(name: String): String =
    FilesStorage.allFiles
      .find(_.serverFileName == name)
      .map(_.name)
      .getOrElse(name)
}
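A runnable trace of the resolveFile logic above under assumed inputs; the directory, prefix, and URI are illustrative, not from the project.

import java.io.File
import java.net.URLDecoder
import java.nio.charset.StandardCharsets

object ResolveFileTrace extends App {
  val filesDir = "/srv/uploads"            // assumed
  val contextPrefix = "/files"             // assumed
  val requestURI = "/files/My%20Report.pdf"

  // Strip the mount prefix, then percent-decode the remaining file name.
  val name = URLDecoder.decode(
    requestURI.stripPrefix(contextPrefix + "/"), StandardCharsets.UTF_8.name())
  println(new File(filesDir, name))        // /srv/uploads/My Report.pdf
}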
Example 36
Source File: Util.scala From scuruto with MIT License | 5 votes |
package lib

import javax.servlet.http.HttpServletRequest

object Util {
  def baseURL(request: HttpServletRequest): String = {
    val forwardedProto: String = request.getHeader("X-Forwarded-Proto")
    val scheme: String = if (forwardedProto == null) request.getScheme else forwardedProto
    val domain: String = request.getServerName
    val forwardedPort: String = request.getHeader("X-Forwarded-Port")
    val port: Int = if (forwardedPort == null) request.getServerPort else forwardedPort.toInt
    val _port = {
      if (port == 80 || port == 443) {
        ""
      } else {
        ":" + port.toString
      }
    }
    scheme + "://" + domain + _port
  }
}
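A quick check of baseURL behind a reverse proxy, assuming Mockito on the classpath (other examples on this page already use it); the header values are illustrative.

import javax.servlet.http.HttpServletRequest
import org.mockito.Mockito.{mock, when}

import lib.Util

object BaseURLCheck extends App {
  val request = mock(classOf[HttpServletRequest])
  when(request.getHeader("X-Forwarded-Proto")).thenReturn("https")
  when(request.getServerName).thenReturn("example.com")
  when(request.getHeader("X-Forwarded-Port")).thenReturn("443")

  // Default ports (80/443) are dropped from the result.
  println(Util.baseURL(request))  // https://example.com
}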
Example 37
Source File: HistoryNotFoundPage.scala From SparkCore with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.master.ui import java.net.URLDecoder import javax.servlet.http.HttpServletRequest import scala.xml.Node import org.apache.spark.ui.{UIUtils, WebUIPage} private[spark] class HistoryNotFoundPage(parent: MasterWebUI) extends WebUIPage("history/not-found") { def render(request: HttpServletRequest): Seq[Node] = { val titleParam = request.getParameter("title") val msgParam = request.getParameter("msg") val exceptionParam = request.getParameter("exception") // If no parameters are specified, assume the user did not enable event logging val defaultTitle = "Event logging is not enabled" val defaultContent = <div class="row-fluid"> <div class="span12" style="font-size:14px"> No event logs were found for this application! To <a href="http://spark.apache.org/docs/latest/monitoring.html">enable event logging</a>, set <span style="font-style:italic">spark.eventLog.enabled</span> to true and <span style="font-style:italic">spark.eventLog.dir</span> to the directory to which your event logs are written. </div> </div> val title = Option(titleParam).getOrElse(defaultTitle) val content = Option(msgParam) .map { msg => URLDecoder.decode(msg, "UTF-8") } .map { msg => <div class="row-fluid"> <div class="span12" style="font-size:14px">{msg}</div> </div> ++ Option(exceptionParam) .map { e => URLDecoder.decode(e, "UTF-8") } .map { e => <pre>{e}</pre> } .getOrElse(Seq.empty) }.getOrElse(defaultContent) UIUtils.basicSparkPage(content, title) } }
Example 38
Source File: WorkerWebUI.scala From SparkCore with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.worker.ui

import java.io.File
import javax.servlet.http.HttpServletRequest

import org.apache.spark.{Logging, SparkConf}
import org.apache.spark.deploy.worker.Worker
import org.apache.spark.deploy.worker.ui.WorkerWebUI._
import org.apache.spark.ui.{SparkUI, WebUI}
import org.apache.spark.ui.JettyUtils._
import org.apache.spark.util.AkkaUtils

  def initialize() {
    val logPage = new LogPage(this)
    attachPage(logPage)
    attachPage(new WorkerPage(this))
    attachHandler(createStaticHandler(WorkerWebUI.STATIC_RESOURCE_BASE, "/static"))
    attachHandler(createServletHandler("/log",
      (request: HttpServletRequest) => logPage.renderLog(request), worker.securityMgr))
  }
}

private[spark] object WorkerWebUI {
  val STATIC_RESOURCE_BASE = SparkUI.STATIC_RESOURCE_DIR
}
Example 39
Source File: ExecutorThreadDumpPage.scala From SparkCore with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.exec import java.net.URLDecoder import javax.servlet.http.HttpServletRequest import scala.util.Try import scala.xml.{Text, Node} import org.apache.spark.ui.{UIUtils, WebUIPage} private[ui] class ExecutorThreadDumpPage(parent: ExecutorsTab) extends WebUIPage("threadDump") { private val sc = parent.sc def render(request: HttpServletRequest): Seq[Node] = { val executorId = Option(request.getParameter("executorId")).map { executorId => // Due to YARN-2844, "<driver>" in the url will be encoded to "%25253Cdriver%25253E" when // running in yarn-cluster mode. `request.getParameter("executorId")` will return // "%253Cdriver%253E". Therefore we need to decode it until we get the real id. var id = executorId var decodedId = URLDecoder.decode(id, "UTF-8") while (id != decodedId) { id = decodedId decodedId = URLDecoder.decode(id, "UTF-8") } id }.getOrElse { throw new IllegalArgumentException(s"Missing executorId parameter") } val time = System.currentTimeMillis() val maybeThreadDump = sc.get.getExecutorThreadDump(executorId) val content = maybeThreadDump.map { threadDump => val dumpRows = threadDump.map { thread => <div class="accordion-group"> <div class="accordion-heading" onclick="$(this).next().toggleClass('hidden')"> <a class="accordion-toggle"> Thread {thread.threadId}: {thread.threadName} ({thread.threadState}) </a> </div> <div class="accordion-body hidden"> <div class="accordion-inner"> <pre>{thread.stackTrace}</pre> </div> </div> </div> } <div class="row-fluid"> <p>Updated at {UIUtils.formatDate(time)}</p> { // scalastyle:off <p><a class="expandbutton" onClick="$('.accordion-body').removeClass('hidden'); $('.expandbutton').toggleClass('hidden')"> Expand All </a></p> <p><a class="expandbutton hidden" onClick="$('.accordion-body').addClass('hidden'); $('.expandbutton').toggleClass('hidden')"> Collapse All </a></p> // scalastyle:on } <div class="accordion">{dumpRows}</div> </div> }.getOrElse(Text("Error fetching thread dump")) UIUtils.headerSparkPage(s"Thread dump for executor $executorId", content, parent) } }
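The while loop above decodes until the value stops changing. The same fixed-point idea as a standalone tail-recursive helper; decodeFully is our name for it, not Spark's.

import java.net.URLDecoder

import scala.annotation.tailrec

object UrlParams {
  // Decodes repeatedly until the value is stable, handling the double/triple
  // encoding described in the YARN-2844 comment above.
  @tailrec
  def decodeFully(value: String): String = {
    val decoded = URLDecoder.decode(value, "UTF-8")
    if (decoded == value) value else decodeFully(decoded)
  }
}

// UrlParams.decodeFully("%253Cdriver%253E") == "<driver>"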
Example 40
Source File: EnvironmentPage.scala From SparkCore with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.env

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class EnvironmentPage(parent: EnvironmentTab) extends WebUIPage("") {
  private val listener = parent.listener

  def render(request: HttpServletRequest): Seq[Node] = {
    val runtimeInformationTable = UIUtils.listingTable(
      propertyHeader, jvmRow, listener.jvmInformation, fixedWidth = true)
    val sparkPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.sparkProperties, fixedWidth = true)
    val systemPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.systemProperties, fixedWidth = true)
    val classpathEntriesTable = UIUtils.listingTable(
      classPathHeaders, classPathRow, listener.classpathEntries, fixedWidth = true)
    val content =
      <span>
        <h4>Runtime Information</h4> {runtimeInformationTable}
        <h4>Spark Properties</h4> {sparkPropertiesTable}
        <h4>System Properties</h4> {systemPropertiesTable}
        <h4>Classpath Entries</h4> {classpathEntriesTable}
      </span>
    UIUtils.headerSparkPage("Environment", content, parent)
  }

  private def propertyHeader = Seq("Name", "Value")
  private def classPathHeaders = Seq("Resource", "Source")
  private def jvmRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr>
}
Example 41
Source File: StoragePage.scala From SparkCore with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.storage

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.storage.RDDInfo
import org.apache.spark.ui.{WebUIPage, UIUtils}
import org.apache.spark.util.Utils

  private def rddRow(rdd: RDDInfo): Seq[Node] = {
    // scalastyle:off
    <tr>
      <td>
        <a href={"%s/storage/rdd?id=%s".format(UIUtils.prependBaseUri(parent.basePath), rdd.id)}>
          {rdd.name}
        </a>
      </td>
      <td>{rdd.storageLevel.description}
      </td>
      <td>{rdd.numCachedPartitions}</td>
      <td>{"%.0f%%".format(rdd.numCachedPartitions * 100.0 / rdd.numPartitions)}</td>
      <td sorttable_customkey={rdd.memSize.toString}>{Utils.bytesToString(rdd.memSize)}</td>
      <td sorttable_customkey={rdd.tachyonSize.toString}>{Utils.bytesToString(rdd.tachyonSize)}</td>
      <td sorttable_customkey={rdd.diskSize.toString} >{Utils.bytesToString(rdd.diskSize)}</td>
    </tr>
    // scalastyle:on
  }
}
Example 42
Source File: StagesTab.scala From SparkCore with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import org.apache.spark.scheduler.SchedulingMode
import org.apache.spark.ui.{SparkUI, SparkUITab}

private[ui] class StagesTab(parent: SparkUI) extends SparkUITab(parent, "stages") {
  val sc = parent.sc
  val killEnabled = parent.killEnabled
  val listener = parent.jobProgressListener

  attachPage(new AllStagesPage(this))
  attachPage(new StagePage(this))
  attachPage(new PoolPage(this))

  def isFairScheduler = listener.schedulingMode.exists(_ == SchedulingMode.FAIR)

  def handleKillRequest(request: HttpServletRequest) = {
    if ((killEnabled) && (parent.securityManager.checkModifyPermissions(request.getRemoteUser))) {
      val killFlag = Option(request.getParameter("terminate")).getOrElse("false").toBoolean
      val stageId = Option(request.getParameter("id")).getOrElse("-1").toInt
      if (stageId >= 0 && killFlag && listener.activeStages.contains(stageId)) {
        sc.get.cancelStage(stageId)
      }
      // Do a quick pause here to give Spark time to kill the stage so it shows up as
      // killed after the refresh. Note that this will block the serving thread so the
      // time should be limited in duration.
      Thread.sleep(100)
    }
  }
}
Example 43
Source File: PoolPage.scala From SparkCore with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.scheduler.{Schedulable, StageInfo}
import org.apache.spark.ui.{WebUIPage, UIUtils}

private[ui] class PoolPage(parent: StagesTab) extends WebUIPage("pool") {
  private val sc = parent.sc
  private val listener = parent.listener

  def render(request: HttpServletRequest): Seq[Node] = {
    listener.synchronized {
      val poolName = request.getParameter("poolname")
      require(poolName != null && poolName.nonEmpty, "Missing poolname parameter")

      val poolToActiveStages = listener.poolToActiveStages
      val activeStages = poolToActiveStages.get(poolName) match {
        case Some(s) => s.values.toSeq
        case None => Seq[StageInfo]()
      }
      val activeStagesTable = new StageTableBase(activeStages.sortBy(_.submissionTime).reverse,
        parent.basePath, parent.listener, isFairScheduler = parent.isFairScheduler,
        killEnabled = parent.killEnabled)

      // For now, pool information is only accessible in live UIs
      val pools = sc.map(_.getPoolForName(poolName).get).toSeq
      val poolTable = new PoolTable(pools, parent)

      val content =
        <h4>Summary </h4> ++ poolTable.toNodeSeq ++
        <h4>{activeStages.size} Active Stages</h4> ++ activeStagesTable.toNodeSeq

      UIUtils.headerSparkPage("Fair Scheduler Pool: " + poolName, content, parent)
    }
  }
}
Example 44
Source File: HistoryServerSuite.scala From SparkCore with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.history

import javax.servlet.http.HttpServletRequest

import scala.collection.mutable

import org.apache.hadoop.fs.Path
import org.mockito.Mockito.{when}
import org.scalatest.FunSuite
import org.scalatest.Matchers
import org.scalatest.mock.MockitoSugar

import org.apache.spark.ui.SparkUI

class HistoryServerSuite extends FunSuite with Matchers with MockitoSugar {

  test("generate history page with relative links") {
    val historyServer = mock[HistoryServer]
    val request = mock[HttpServletRequest]
    val ui = mock[SparkUI]
    val link = "/history/app1"
    val info = new ApplicationHistoryInfo("app1", "app1", 0, 2, 1, "xxx", true)
    when(historyServer.getApplicationList()).thenReturn(Seq(info))
    when(ui.basePath).thenReturn(link)
    when(historyServer.getProviderConfig()).thenReturn(Map[String, String]())
    val page = new HistoryPage(historyServer)

    //when
    val response = page.render(request)

    //then
    val links = response \\ "a"
    val justHrefs = for {
      l <- links
      attrs <- l.attribute("href")
    } yield (attrs.toString)
    justHrefs should contain(link)
  }
}
Example 45
Source File: CustomPage.scala From cuesheet with Apache License 2.0 | 5 votes |
package org.apache.spark.ui

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

abstract class CustomPage(val name: String) extends WebUIPage("") {

  private[ui] var tab: SparkUITab = _
  private[ui] var title: String = _
  private[ui] var ui: SparkUI = _

  def attachRedirectHandler(
      srcPath: String,
      destPath: String,
      beforeRedirect: HttpServletRequest => Unit = x => (),
      basePath: String = "",
      httpMethods: Set[String] = Set("GET")): Unit = {
    // Can't use Jetty interface as it is shaded to org.spark-project; use reflection instead
    val createRedirectHandler =
      JettyUtils.getClass.getMethods.filter(_.getName == "createRedirectHandler").head
    val handler = createRedirectHandler.invoke(
      JettyUtils, srcPath, destPath, beforeRedirect, basePath, httpMethods)
    val attachHandler = ui.getClass.getMethods.filter(_.getName == "attachHandler").head
    attachHandler.invoke(ui, handler)
  }

  def renderPage(request: HttpServletRequest): Seq[Node]

  final override def render(request: HttpServletRequest): Seq[Node] = {
    val content = renderPage(request)
    UIUtils.headerSparkPage(title, content, tab, Some(5000))
  }
}
Example 46
Source File: BuildSettingListPage.scala From teamcity-slack with MIT License | 5 votes |
package com.fpd.teamcity.slack.pages

import javax.servlet.http.{HttpServletRequest, HttpServletResponse}

import com.fpd.teamcity.slack.Helpers.Implicits._
import com.fpd.teamcity.slack.controllers.SlackController
import com.fpd.teamcity.slack.{ConfigManager, PermissionManager, Resources}
import jetbrains.buildServer.controllers.BaseController
import jetbrains.buildServer.serverSide.ProjectManager
import jetbrains.buildServer.web.openapi.{PluginDescriptor, WebControllerManager}
import org.springframework.web.servlet.ModelAndView

import scala.collection.JavaConverters._

class BuildSettingListPage(controllerManager: WebControllerManager,
                           descriptor: PluginDescriptor,
                           config: ConfigManager,
                           val permissionManager: PermissionManager,
                           projectManager: ProjectManager
                          ) extends BaseController with SlackController {

  controllerManager.registerController(Resources.buildSettingList.controllerUrl, this)

  override def handle(request: HttpServletRequest, response: HttpServletResponse): ModelAndView = {
    val view = descriptor.getPluginResourcesPath(Resources.buildSettingList.view)
    new ModelAndView(view,
      Map("list" → config.buildSettingList(request.param("buildTypeId").get).asJava).asJava)
  }

  override protected def checkPermission(request: HttpServletRequest): Boolean =
    request.param("buildTypeId").exists(id ⇒ permissionManager.buildAccessPermitted(request, id))
}
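request.param here comes from Helpers.Implicits._, which this page does not show. Below is a plausible minimal version of that enrichment, written as an assumption rather than the project's actual code.

import javax.servlet.http.HttpServletRequest

object Implicits {
  implicit class RichRequest(val request: HttpServletRequest) extends AnyVal {
    // Empty strings are treated as absent, which matches how the controllers
    // in these examples use .exists/.getOrElse on the result.
    def param(name: String): Option[String] =
      Option(request.getParameter(name)).filter(_.nonEmpty)
  }
}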
Example 47
Source File: BuildPage.scala From teamcity-slack with MIT License | 5 votes |
package com.fpd.teamcity.slack.pages import java.util import javax.servlet.http.HttpServletRequest import com.fpd.teamcity.slack.{ConfigManager, PermissionManager, Resources, Strings} import jetbrains.buildServer.serverSide.{ProjectManager, SBuildType} import jetbrains.buildServer.users.SUser import jetbrains.buildServer.web.openapi.buildType.BuildTypeTab import jetbrains.buildServer.web.openapi.{PluginDescriptor, WebControllerManager} class BuildPage( manager: WebControllerManager, projectManager: ProjectManager, descriptor: PluginDescriptor, configManager: ConfigManager, val permissionManager: PermissionManager ) extends BuildTypeTab( Strings.tabId, Strings.label, manager: WebControllerManager, projectManager: ProjectManager, descriptor.getPluginResourcesPath(Resources.buildPage.view)) with SlackExtension { addCssFile(descriptor.getPluginResourcesPath("css/slack-notifier.css")) addJsFile(descriptor.getPluginResourcesPath("js/slack-notifier.js")) override def isAvailable(request: HttpServletRequest): Boolean = permissionManager.buildAccessPermitted(request, getBuildType(request).getInternalId) override def fillModel(model: util.Map[String, AnyRef], request: HttpServletRequest, buildType: SBuildType, user: SUser): Unit = { model.put("buildTypeId", buildType.getBuildTypeId) model.put("buildSettingListUrl", Resources.buildSettingList.url) model.put("buildSettingEditUrl", Resources.buildSettingEdit.url) model.put("buildSettingSaveUrl", Resources.buildSettingSave.url) model.put("buildSettingDeleteUrl", Resources.buildSettingDelete.url) model.put("buildSettingTryUrl", Resources.buildSettingTry.url) } }
Example 48
Source File: ConfigPage.scala From teamcity-slack with MIT License | 5 votes |
package com.fpd.teamcity.slack.pages

import java.util
import javax.servlet.http.HttpServletRequest

import com.fpd.teamcity.slack.{ConfigManager, PermissionManager, Resources, Strings}
import jetbrains.buildServer.controllers.admin.AdminPage
import jetbrains.buildServer.web.openapi.{Groupable, PagePlaces, PluginDescriptor}

class ConfigPage(
                  extension: ConfigManager,
                  pagePlaces: PagePlaces,
                  descriptor: PluginDescriptor,
                  val permissionManager: PermissionManager
                ) extends AdminPage(
  pagePlaces,
  Strings.tabId,
  descriptor.getPluginResourcesPath(ConfigPage.includeUrl),
  Strings.label) with SlackExtension {

  register()
  addJsFile(descriptor.getPluginResourcesPath("js/slack-notifier-config.js"))

  override def fillModel(model: util.Map[String, AnyRef], request: HttpServletRequest): Unit = {
    import collection.JavaConverters._
    import com.fpd.teamcity.slack.Helpers.Implicits._

    model.putAll(extension.details.mapValues(_.getOrElse("")).asJava)
    model.put("error", request.param("error").getOrElse(""))
    model.put("saveConfigSubmitUrl", Resources.configPage.controllerUrl)
  }

  override def getGroup: String = Groupable.SERVER_RELATED_GROUP
}

object ConfigPage {
  private def includeUrl: String = Resources.configPage.view
}
Example 49
Source File: BuildSettingEditPage.scala From teamcity-slack with MIT License | 5 votes |
package com.fpd.teamcity.slack.pages

import javax.servlet.http.{HttpServletRequest, HttpServletResponse}

import com.fpd.teamcity.slack.Helpers.Implicits._
import com.fpd.teamcity.slack.controllers.SlackController
import com.fpd.teamcity.slack.{ConfigManager, SBuildMessageBuilder, PermissionManager, Resources}
import jetbrains.buildServer.web.openapi.{PluginDescriptor, WebControllerManager}
import org.springframework.web.servlet.ModelAndView

import scala.collection.JavaConverters._

class BuildSettingEditPage(controllerManager: WebControllerManager,
                           descriptor: PluginDescriptor,
                           val permissionManager: PermissionManager,
                           config: ConfigManager
                          ) extends SlackController {

  controllerManager.registerController(Resources.buildSettingEdit.controllerUrl, this)

  override def handle(request: HttpServletRequest, response: HttpServletResponse): ModelAndView = {
    import com.fpd.teamcity.slack.Helpers.Implicits._

    val view = descriptor.getPluginResourcesPath(Resources.buildSettingEdit.view)
    val result = for {
      key ← request.param("id")
      model ← config.buildSetting(key)
    } yield {
      new ModelAndView(view, Map("model" → model, "key" → key).asJava)
    }

    result getOrElse new ModelAndView(view,
      Map("defaultMessage" → SBuildMessageBuilder.defaultMessage).asJava)
  }

  override protected def checkPermission(request: HttpServletRequest): Boolean =
    request.param("buildTypeId").exists(id ⇒ permissionManager.buildAccessPermitted(request, id))
}
Example 50
Source File: ConfigController.scala From teamcity-slack with MIT License | 5 votes |
package com.fpd.teamcity.slack.controllers import java.net.URLEncoder import javax.servlet.http.{HttpServletRequest, HttpServletResponse} import com.fpd.teamcity.slack._ import com.fpd.teamcity.slack.Strings.ConfigController._ import jetbrains.buildServer.web.openapi.WebControllerManager import org.springframework.web.servlet.ModelAndView import scala.util.{Failure, Success} class ConfigController( configManager: ConfigManager, controllerManager: WebControllerManager, val permissionManager: PermissionManager, slackGateway: SlackGateway ) extends SlackController { import ConfigController._ import Helpers.Implicits._ controllerManager.registerController(Resources.configPage.controllerUrl, this) override def handle(request: HttpServletRequest, response: HttpServletResponse): ModelAndView = { val result = for { oauthKey ← request.param("oauthKey") } yield { val newConfig = ConfigManager.Config(oauthKey) val publicUrl = request.param("publicUrl").getOrElse("") val senderName = request.param("senderName").getOrElse("") slackGateway.sessionByConfig(newConfig).map { _ ⇒ configManager.update( oauthKey, publicUrl, request.param("personalEnabled").isDefined, request.param("enabled").isDefined, senderName, request.param("sendAsAttachment").isDefined ) } match { case Success(true) ⇒ Left(true) case Success(_) ⇒ Right(oauthTokenUpdateFailed) case Failure(e) ⇒ Right(sessionByConfigError(e.getMessage)) } } val either = result.getOrElse(Right(oauthKeyParamMissing)) redirectTo(createRedirect(either, request.getContextPath), response) } } object ConfigController { private def createRedirect(either: Either[Boolean, String], context: String): String = either match { case Left(_) ⇒ s"$context/admin/admin.html?item=${Strings.tabId}" case Right(error) ⇒ s"$context/admin/admin.html?item=${Strings.tabId}&error=${URLEncoder.encode(error, "UTF-8")}" } }
Example 51
Source File: BuildSettingsDelete.scala From teamcity-slack with MIT License | 5 votes |
package com.fpd.teamcity.slack.controllers

import javax.servlet.http.{HttpServletRequest, HttpServletResponse}

import com.fpd.teamcity.slack.Helpers.Implicits._
import com.fpd.teamcity.slack.{ConfigManager, PermissionManager, Resources}
import jetbrains.buildServer.web.openapi.{PluginDescriptor, WebControllerManager}
import org.springframework.web.servlet.ModelAndView

class BuildSettingsDelete(configManager: ConfigManager,
                          controllerManager: WebControllerManager,
                          val permissionManager: PermissionManager,
                          implicit val descriptor: PluginDescriptor
                         ) extends SlackController {

  controllerManager.registerController(Resources.buildSettingDelete.controllerUrl, this)

  override def handle(request: HttpServletRequest, response: HttpServletResponse): ModelAndView = {
    val result = for {
      id ← request.param("id")
      result ← configManager.removeBuildSetting(id)
    } yield result

    ajaxView(result.filter(_ == true).map(_ ⇒ "") getOrElse "Something went wrong")
  }

  override protected def checkPermission(request: HttpServletRequest): Boolean =
    request.param("id").exists(id ⇒ permissionManager.settingAccessPermitted(request, id))
}
Example 52
Source File: BuildSettingsTry.scala From teamcity-slack with MIT License | 5 votes |
package com.fpd.teamcity.slack.controllers import javax.servlet.http.{HttpServletRequest, HttpServletResponse} import com.fpd.teamcity.slack.ConfigManager.BuildSetting import com.fpd.teamcity.slack.Helpers.Implicits._ import com.fpd.teamcity.slack.SlackGateway.{Destination, SlackChannel, SlackUser, attachmentToSlackMessage} import com.fpd.teamcity.slack._ import jetbrains.buildServer.serverSide.{BuildHistory, SFinishedBuild} import jetbrains.buildServer.users.SUser import jetbrains.buildServer.web.openapi.{PluginDescriptor, WebControllerManager} import jetbrains.buildServer.web.util.SessionUser import org.springframework.web.servlet.ModelAndView import scala.collection.JavaConverters._ import scala.concurrent.Await import scala.concurrent.duration._ import scala.language.postfixOps import scala.util.{Failure, Try} class BuildSettingsTry(buildHistory: BuildHistory, configManager: ConfigManager, gateway: SlackGateway, controllerManager: WebControllerManager, val permissionManager: PermissionManager, messageBuilderFactory: MessageBuilderFactory, implicit val descriptor: PluginDescriptor ) extends SlackController { import BuildSettingsTry._ import Strings.BuildSettingsTry._ controllerManager.registerController(Resources.buildSettingTry.controllerUrl, this) override def handle(request: HttpServletRequest, response: HttpServletResponse): ModelAndView = Try { val id = request.param("id") .getOrElse(throw HandlerException(emptyIdParam)) val setting = configManager.buildSetting(id) .getOrElse(throw HandlerException(buildSettingNotFound)) val build = findPreviousBuild(buildHistory, setting) .getOrElse(throw HandlerException(previousBuildNotFound)) detectDestination(setting, SessionUser.getUser(request)) match { case Some(dest) ⇒ val future = gateway.sendMessage(dest, attachmentToSlackMessage( messageBuilderFactory.createForBuild(build).compile(setting.messageTemplate, setting), configManager.sendAsAttachment.exists(x ⇒ x) )) Await.result(future, 10 seconds) match { case Failure(error) ⇒ throw HandlerException(error.getMessage) case _ ⇒ messageSent(dest.toString) } case _ ⇒ throw HandlerException(unknownDestination) } } recover { case x: HandlerException ⇒ s"Error: ${x.getMessage}" } map { ajaxView } get override protected def checkPermission(request: HttpServletRequest): Boolean = request.param("id").exists(id ⇒ permissionManager.settingAccessPermitted(request, id)) } object BuildSettingsTry { def findPreviousBuild(buildHistory: BuildHistory, setting: BuildSetting): Option[SFinishedBuild] = buildHistory.getEntries(false).asScala.find(b ⇒ !b.isPersonal && b.matchBranch(setting.branchMask) && b.getBuildTypeId == setting.buildTypeId) def detectDestination(setting: BuildSetting, user: ⇒ SUser): Option[Destination] = setting.slackChannel.isEmpty match { case true if setting.notifyCommitter ⇒ Some(SlackUser(user.getEmail)) case false ⇒ Some(SlackChannel(setting.slackChannel)) case _ ⇒ None } case class HandlerException(message: String) extends Exception(message) }
Example 53
Source File: BuildSettingsSave.scala From teamcity-slack with MIT License | 5 votes |
package com.fpd.teamcity.slack.controllers import javax.servlet.http.{HttpServletRequest, HttpServletResponse} import com.fpd.teamcity.slack.ConfigManager.{BuildSetting, BuildSettingFlag} import com.fpd.teamcity.slack.Helpers.Implicits._ import com.fpd.teamcity.slack.Strings.BuildSettingsController._ import com.fpd.teamcity.slack.{ConfigManager, PermissionManager, Resources, SlackGateway} import jetbrains.buildServer.web.openapi.{PluginDescriptor, WebControllerManager} import org.springframework.web.servlet.ModelAndView import scala.util.{Failure, Success, Try} class BuildSettingsSave(val configManager: ConfigManager, controllerManager: WebControllerManager, slackGateway: SlackGateway, val permissionManager: PermissionManager, implicit val descriptor: PluginDescriptor ) extends SlackController { controllerManager.registerController(Resources.buildSettingSave.controllerUrl, this) override def handle(request: HttpServletRequest, response: HttpServletResponse): ModelAndView = ajaxView(handleSave(request)) def handleSave(request: HttpServletRequest): String = { def flags = { val keyToFlag = Map( "success" → BuildSettingFlag.success, "failureToSuccess" → BuildSettingFlag.failureToSuccess, "fail" → BuildSettingFlag.failure, "successToFailure" → BuildSettingFlag.successToFailure, "started" → BuildSettingFlag.started, "canceled" → BuildSettingFlag.canceled, "queued" → BuildSettingFlag.queued ) val keys = keyToFlag.keys.filter(key ⇒ request.param(key).isDefined) keys.map(keyToFlag).toSet } val result = for { // preparing params branch ← request.param("branchMask") buildId ← request.param("buildTypeId") message ← request.param("messageTemplate") config ← configManager.config } yield { lazy val artifactsMask = request.param("artifactsMask") val channel = request.param("slackChannel") val notifyCommitter = request.param("notifyCommitter").isDefined val maxVcsChanges = request.param("maxVcsChanges").getOrElse(BuildSetting.defaultMaxVCSChanges.toString).toInt // store build setting def updateConfig() = configManager.updateBuildSetting( BuildSetting(buildId, branch, channel.getOrElse(""), message, flags, artifactsMask.getOrElse(""), request.param("deepLookup").isDefined, notifyCommitter, maxVcsChanges), request.param("key") ) // check channel availability if (!channel.exists(_.nonEmpty) && !notifyCommitter) { channelOrNotifyCommitterError } else if (Try(branch.r).isFailure) { compileBranchMaskError } else if (artifactsMask.exists(s ⇒ Try(s.r).isFailure)) { compileArtifactsMaskError } else { slackGateway.sessionByConfig(config) match { case Success(session) ⇒ if (channel.exists(s ⇒ null == session.findChannelByName(s))) { channelNotFoundError(channel.get) } else { updateConfig() match { case Some(_) ⇒ "" case _ ⇒ emptyConfigError } } case Failure(e) ⇒ sessionByConfigError(e.getMessage) } } } result getOrElse requirementsError } override protected def checkPermission(request: HttpServletRequest): Boolean = request.param("buildTypeId").exists(permissionManager.buildAccessPermitted(request, _)) }
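The flags computation above checks which checkbox names arrived with the request (unchecked checkboxes are simply absent from a form submission). The same idea as a small reusable helper; checkedFlags is a hypothetical name, not the project's API.

import javax.servlet.http.HttpServletRequest

object FormFlags {
  // Collects the flags whose checkbox parameter names are present in the request.
  def checkedFlags[A](request: HttpServletRequest, mapping: Map[String, A]): Set[A] =
    mapping.collect {
      case (key, flag) if Option(request.getParameter(key)).isDefined => flag
    }.toSet
}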
Example 54
Source File: SlackController.scala From teamcity-slack with MIT License | 5 votes |
package com.fpd.teamcity.slack.controllers

import javax.servlet.http.{HttpServletRequest, HttpServletResponse}

import com.fpd.teamcity.slack.{PermissionManager, Resources}
import jetbrains.buildServer.controllers.{BaseController, SimpleView}
import jetbrains.buildServer.web.openapi.PluginDescriptor
import org.springframework.web.servlet.ModelAndView

trait SlackController extends BaseController {

  protected val permissionManager: PermissionManager

  def handle(request: HttpServletRequest, response: HttpServletResponse): ModelAndView

  override def doHandle(request: HttpServletRequest, response: HttpServletResponse): ModelAndView =
    if (checkPermission(request))
      handle(request, response)
    else
      SimpleView.createTextView("Access denied")

  def ajaxView(message: String)(implicit descriptor: PluginDescriptor): ModelAndView = {
    val modelAndView = new ModelAndView(descriptor.getPluginResourcesPath(Resources.ajaxView.view))
    modelAndView.getModel.put("message", message)
    modelAndView
  }

  protected def checkPermission(request: HttpServletRequest): Boolean =
    permissionManager.accessPermitted(request)
}
Example 55
Source File: EventsController.scala From chatoverflow with Eclipse Public License 2.0 | 5 votes |
package org.codeoverflow.chatoverflow.ui.web.rest.events import java.io.PrintWriter import java.util.concurrent.ConcurrentHashMap import javax.servlet.AsyncContext import javax.servlet.http.HttpServletRequest import org.codeoverflow.chatoverflow.ui.web.JsonServlet import org.scalatra.servlet.ScalatraAsyncSupport import org.scalatra.{BadRequest, Unauthorized} import org.scalatra.swagger.Swagger class EventsController(implicit val swagger: Swagger) extends JsonServlet with ScalatraAsyncSupport with EventsControllerDefinition { private val connectionWriters = new ConcurrentHashMap[AsyncContext, PrintWriter]() def broadcast(messageType: String, message: String = null): Unit = { connectionWriters.forEach((_, writer) => { try { sendMessage(writer, messageType, message) } catch { //probably lost or closed connection, remove from the list of connected clients case _: Throwable => connectionWriters.remove(writer) } }) } def closeConnections(): Unit = { connectionWriters.forEach((_, writer) => { try { sendMessage(writer, "close", null) writer.close() } finally { connectionWriters.remove(writer) } }) } private def sendMessage(writer: PrintWriter, messageType: String, message: String): Unit = { var msg = "event: " + messageType.replace("\n", "") + "\n" if (message != null) msg += "data: " + message.replace("\n", "\ndata: ") + "\n\n" writer.write(msg) writer.flush() } get("/", operation(getEvents)) { val accept = request.getHeader("Accept") if (accept == null || !accept.replace(" ", "").split(",").contains("text/event-stream")) { status = 406 } else { authParamRequired { contentType = "text/event-stream" val asyncContext = request.startAsync() asyncContext.setTimeout(0) val writer = asyncContext.getResponse.getWriter connectionWriters.put(asyncContext, writer) } } } private def authParamRequired(func: => Any)(implicit request: HttpServletRequest): Any = { val authKeyKey = "authKey" if (!request.parameters.contains(authKeyKey) || request.getParameter(authKeyKey).isEmpty) { BadRequest() } else if (request.getParameter(authKeyKey) != chatOverflow.credentialsService.generateAuthKey()) { Unauthorized() } else { func } } }
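A minimal Scala client for the event stream above, using only JDK APIs; the URL, port, and authKey value are placeholders, not the project's defaults.

import java.io.{BufferedReader, InputStreamReader}
import java.net.URL

object SseClient extends App {
  // Placeholder endpoint and auth key; adjust to the running server.
  val url = new URL("http://localhost:8080/events?authKey=SECRET")
  val conn = url.openConnection()
  conn.setRequestProperty("Accept", "text/event-stream")

  val reader = new BufferedReader(new InputStreamReader(conn.getInputStream, "UTF-8"))
  // Prints the "event: ..." / "data: ..." frames as they arrive.
  Iterator.continually(reader.readLine())
    .takeWhile(_ != null)
    .foreach(println)
}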
Example 56
Source File: JsonServlet.scala From chatoverflow with Eclipse Public License 2.0 | 5 votes |
package org.codeoverflow.chatoverflow.ui.web

import javax.servlet.http.HttpServletRequest

import org.codeoverflow.chatoverflow.ui.web.rest.DTOs.ResultMessage
import org.codeoverflow.chatoverflow.{ChatOverflow, Launcher}
import org.json4s.{DefaultFormats, Formats}
import org.scalatra.json.JacksonJsonSupport
import org.scalatra.{BadRequest, CorsSupport, ScalatraServlet, Unauthorized}

  protected def authKeyRequired(func: => Any)(implicit request: HttpServletRequest): Any = {
    val authKeyKey = "authKey"

    if (request.header(authKeyKey).isEmpty) {
      BadRequest()
    } else if (request.header(authKeyKey).get != chatOverflow.credentialsService.generateAuthKey()) {
      Unauthorized()
    } else {
      func
    }
  }
}
Example 57
Source File: DruidQueriesPage.scala From spark-druid-olap with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.hive.thriftserver.sparklinedata.ui import javax.servlet.http.HttpServletRequest import org.apache.spark.sql.SPLLogging import org.apache.spark.ui.{UIUtils, WebUIPage} import org.sparklinedata.druid.metadata.{DruidQueryExecutionView, DruidQueryHistory} import scala.xml.Node private[ui] class DruidQueriesPage(parent: DruidQueriesTab) extends WebUIPage("") with SPLLogging { def render(request: HttpServletRequest): Seq[Node] = { val content = generateDruidStatsTable() UIUtils.headerSparkPage("Druid Query Details", content, parent, Some(5000)) } private def generateDruidStatsTable(): Seq[Node] = { val numStatement = DruidQueryHistory.getHistory.size val table = if (numStatement > 0) { val headerRow = Seq("stageId", "partitionId", "taskAttemptId", "druidQueryServer", "druidSegIntervals", "startTime", "druidExecTime", "queryExecTime", "numRows", "druidQuery", "sql") val druidContent = DruidQueryHistory.getHistory def generateDataRow(info: DruidQueryExecutionView): Seq[Node] = { var interval = "" for(temp <- info.druidSegIntervals){ interval += temp } val stageLink = "%s/stages/stage?id=%s&attempt=0" .format(UIUtils.prependBaseUri(parent.basePath), info.stageId) <tr> <td><a href={stageLink}> {info.stageId} </a></td> <td> {info.partitionId} </td> <td>{info.taskAttemptId}</td> <td>{info.druidQueryServer}</td> <td>{interval}</td> <td>{info.startTime}</td> <td>{info.druidExecTime}</td> <td>{info.queryExecTime}</td> <td>{info.numRows}</td> <td>{info.druidQuery}</td> <td>{info.sqlStmt.getOrElse("none")}</td> </tr> } Some(UIUtils.listingTable(headerRow, generateDataRow, druidContent, false, None, Seq(null), false)) } else { None } val content = <h5 id="sqlstat">Druid Query Details</h5> ++ <div> <ul class="unstyled"> {table.getOrElse("No queries have been executed yet.")} </ul> </div> content } }
Example 58
Source File: StubServer.scala From graphcool-framework with Apache License 2.0 | 5 votes |
package cool.graph.stub import javax.servlet.http.{HttpServletRequest, HttpServletResponse} import cool.graph.stub.StubMatching.MatchResult import org.eclipse.jetty.server.handler.AbstractHandler import org.eclipse.jetty.server.{Server, Request => JettyRequest} import scala.collection.mutable case class StubServer(stubs: List[Stub], port: Int, stubNotFoundStatusCode: Int) { org.eclipse.jetty.util.log.Log.setLog(new JustWarningsLogger) val server = new Server(port) def createHandler = StubServerHandler(stubs, stubNotFoundStatusCode) def start: Unit = server.start def stop: Unit = server.stop def requests = handler.requests def lastRequest = handler.requests.head def lastRequest(path: String) = handler.requests.filter(_.path == path).head def requestCount(stub: Stub): Int = handler.requestCount(stub) val handler = createHandler server.setHandler(handler) } case class StubServerHandler(stubs: List[Stub], stubNotFoundStatusCode: Int) extends AbstractHandler { var requests: List[StubRequest] = List() def handle(target: String, baseRequest: JettyRequest, servletRequest: HttpServletRequest, response: HttpServletResponse): Unit = { val stubResponse = try { val stubRequest = StubRequest.fromHttpRequest(servletRequest) requests = stubRequest :: requests stubResponseForRequest(stubRequest) } catch { case e: Throwable => failedResponse(e) } response.setContentType("application/json") response.setStatus(stubResponse.status) stubResponse.headers.foreach(kv => response.setHeader(kv._1, kv._2)) response.getWriter.print(stubResponse.body) baseRequest.setHandled(true) } def stubResponseForRequest(stubRequest: StubRequest): StaticStubResponse = { val matches = StubMatching.matchStubs(stubRequest, stubs) val topCandidate = matches.find(_.isMatch) topCandidate match { case Some(result) => recordStubHit(result.stub) result.stub.stubbedResponse.getStaticStubResponse(stubRequest) case None => noMatchResponse(stubRequest, matches) } } def failedResponse(e: Throwable) = { e.printStackTrace() StaticStubResponse(stubNotFoundStatusCode, s"Stub Matching failed with the following exception: ${e.toString}") } def noMatchResponse(request: StubRequest, notMatches: List[MatchResult]) = { val queryString = request.queryMap.map { case (k, v) => s"$k=$v" }.foldLeft("?") { case (acc, x) => s"$acc&$x" } val noMatchReasons = if (stubs.isEmpty) { """ "There are no registered stubs in the server!" """ } else { notMatches.map(x => s""" "${x.noMatchMessage}" """).mkString(",\n") } val responseJson = { s"""{ | "message": "No stub found for request [URL: ${request.path}$queryString] [METHOD: ${request.httpMethod}}] [BODY: ${request.body}]", | "noMatchReasons" : [ | $noMatchReasons | ] |}""".stripMargin } StaticStubResponse(stubNotFoundStatusCode, responseJson) } def requestCount(stub: Stub): Int = requestCountMap.getOrElse(stub, 0) private def recordStubHit(stub: Stub): Unit = { val numberOfRequests = requestCountMap.getOrElse(stub, 0) requestCountMap.update(stub, numberOfRequests + 1) } private val requestCountMap: mutable.Map[Stub, Int] = mutable.Map.empty }
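StubRequest.fromHttpRequest is referenced above but not shown on this page. Below is a plausible reconstruction based on the fields StubServerHandler actually uses (path, httpMethod, queryMap, body); treat it as a sketch, not the project's code.

import javax.servlet.http.HttpServletRequest

import scala.io.Source

case class StubRequest(httpMethod: String, path: String, queryMap: Map[String, String], body: String)

object StubRequest {
  def fromHttpRequest(request: HttpServletRequest): StubRequest = {
    // Parse "k1=v1&k2=v2" into a map; pairs without '=' are dropped.
    val queryMap = Option(request.getQueryString).getOrElse("")
      .split("&").filter(_.contains("="))
      .map { pair =>
        val Array(k, v) = pair.split("=", 2)
        k -> v
      }.toMap
    // Read the request body fully before the handler responds.
    val body = Source.fromInputStream(request.getInputStream).mkString
    StubRequest(request.getMethod, request.getRequestURI, queryMap, body)
  }
}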
Example 59
Source File: MetricsServlet.scala From multi-tenancy-spark with Apache License 2.0 | 5 votes |
package org.apache.spark.metrics.sink import java.util.Properties import java.util.concurrent.TimeUnit import javax.servlet.http.HttpServletRequest import com.codahale.metrics.MetricRegistry import com.codahale.metrics.json.MetricsModule import com.fasterxml.jackson.databind.ObjectMapper import org.eclipse.jetty.servlet.ServletContextHandler import org.apache.spark.{SecurityManager, SparkConf} import org.apache.spark.ui.JettyUtils._ private[spark] class MetricsServlet( val property: Properties, val registry: MetricRegistry, securityMgr: SecurityManager) extends Sink { val SERVLET_KEY_PATH = "path" val SERVLET_KEY_SAMPLE = "sample" val SERVLET_DEFAULT_SAMPLE = false val servletPath = property.getProperty(SERVLET_KEY_PATH) val servletShowSample = Option(property.getProperty(SERVLET_KEY_SAMPLE)).map(_.toBoolean) .getOrElse(SERVLET_DEFAULT_SAMPLE) val mapper = new ObjectMapper().registerModule( new MetricsModule(TimeUnit.SECONDS, TimeUnit.MILLISECONDS, servletShowSample)) def getHandlers(conf: SparkConf): Array[ServletContextHandler] = { Array[ServletContextHandler]( createServletHandler(servletPath, new ServletParams(request => getMetricsSnapshot(request), "text/json"), securityMgr, conf) ) } def getMetricsSnapshot(request: HttpServletRequest): String = { mapper.writeValueAsString(registry) } override def start() { } override def stop() { } override def report() { } }
Example 60
Source File: WorkerWebUI.scala From multi-tenancy-spark with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.worker.ui

import java.io.File
import javax.servlet.http.HttpServletRequest

import org.apache.spark.deploy.worker.Worker
import org.apache.spark.internal.Logging
import org.apache.spark.ui.{SparkUI, WebUI}
import org.apache.spark.ui.JettyUtils._
import org.apache.spark.util.RpcUtils

  def initialize() {
    val logPage = new LogPage(this)
    attachPage(logPage)
    attachPage(new WorkerPage(this))
    attachHandler(createStaticHandler(WorkerWebUI.STATIC_RESOURCE_BASE, "/static"))
    attachHandler(createServletHandler("/log",
      (request: HttpServletRequest) => logPage.renderLog(request), worker.securityMgr, worker.conf))
  }
}

private[worker] object WorkerWebUI {
  val STATIC_RESOURCE_BASE = SparkUI.STATIC_RESOURCE_DIR
  val DEFAULT_RETAINED_DRIVERS = 1000
  val DEFAULT_RETAINED_EXECUTORS = 1000
}
Example 61
Source File: HistoryPage.scala From multi-tenancy-spark with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.history import javax.servlet.http.HttpServletRequest import scala.xml.Node import org.apache.spark.ui.{UIUtils, WebUIPage} private[history] class HistoryPage(parent: HistoryServer) extends WebUIPage("") { def render(request: HttpServletRequest): Seq[Node] = { val requestedIncomplete = Option(request.getParameter("showIncomplete")).getOrElse("false").toBoolean val allAppsSize = parent.getApplicationList().count(_.completed != requestedIncomplete) val eventLogsUnderProcessCount = parent.getEventLogsUnderProcess() val lastUpdatedTime = parent.getLastUpdatedTime() val providerConfig = parent.getProviderConfig() val content = <script src={UIUtils.prependBaseUri("/static/historypage-common.js")}></script> <div> <div class="span12"> <ul class="unstyled"> {providerConfig.map { case (k, v) => <li><strong>{k}:</strong> {v}</li> }} </ul> { if (eventLogsUnderProcessCount > 0) { <p>There are {eventLogsUnderProcessCount} event log(s) currently being processed which may result in additional applications getting listed on this page. Refresh the page to view updates. </p> } } { if (lastUpdatedTime > 0) { <p>Last updated: <span id="last-updated">{lastUpdatedTime}</span></p> } } { if (allAppsSize > 0) { <script src={UIUtils.prependBaseUri("/static/dataTables.rowsGroup.js")}></script> ++ <div id="history-summary" class="span12 pagination"></div> ++ <script src={UIUtils.prependBaseUri("/static/utils.js")}></script> ++ <script src={UIUtils.prependBaseUri("/static/historypage.js")}></script> ++ <script>setAppLimit({parent.maxApplications})</script> } else if (requestedIncomplete) { <h4>No incomplete applications found!</h4> } else if (eventLogsUnderProcessCount > 0) { <h4>No completed applications found!</h4> } else { <h4>No completed applications found!</h4> ++ parent.emptyListingHtml } } <a href={makePageLink(!requestedIncomplete)}> { if (requestedIncomplete) { "Back to completed applications" } else { "Show incomplete applications" } } </a> </div> </div> UIUtils.basicSparkPage(content, "History Server", true) } private def makePageLink(showIncomplete: Boolean): String = { UIUtils.prependBaseUri("/?" + "showIncomplete=" + showIncomplete) } }
Example 62
Source File: ExecutorsPage.scala From multi-tenancy-spark with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.exec import javax.servlet.http.HttpServletRequest import scala.xml.Node import org.apache.spark.status.api.v1.ExecutorSummary import org.apache.spark.ui.{UIUtils, WebUIPage} // This isn't even used anymore -- but we need to keep it b/c of a MiMa false positive private[ui] case class ExecutorSummaryInfo( id: String, hostPort: String, rddBlocks: Int, memoryUsed: Long, diskUsed: Long, activeTasks: Int, failedTasks: Int, completedTasks: Int, totalTasks: Int, totalDuration: Long, totalInputBytes: Long, totalShuffleRead: Long, totalShuffleWrite: Long, maxMemory: Long, executorLogs: Map[String, String]) private[ui] class ExecutorsPage( parent: ExecutorsTab, threadDumpEnabled: Boolean) extends WebUIPage("") { private val listener = parent.listener def render(request: HttpServletRequest): Seq[Node] = { val content = <div> { <div id="active-executors"></div> ++ <script src={UIUtils.prependBaseUri("/static/utils.js", sparkUser = parent.sparkUser)}></script> ++ <script src={UIUtils.prependBaseUri("/static/executorspage.js", sparkUser = parent.sparkUser)}></script> ++ <script>setThreadDumpEnabled({threadDumpEnabled})</script> } </div>; UIUtils.headerSparkPage("Executors", content, parent, useDataTables = true) } } private[spark] object ExecutorsPage { def getExecInfo( listener: ExecutorsListener, statusId: Int, isActive: Boolean): ExecutorSummary = { val status = if (isActive) { listener.activeStorageStatusList(statusId) } else { listener.deadStorageStatusList(statusId) } val execId = status.blockManagerId.executorId val hostPort = status.blockManagerId.hostPort val rddBlocks = status.numBlocks val memUsed = status.memUsed val maxMem = status.maxMem val diskUsed = status.diskUsed val taskSummary = listener.executorToTaskSummary.getOrElse(execId, ExecutorTaskSummary(execId)) new ExecutorSummary( execId, hostPort, isActive, rddBlocks, memUsed, diskUsed, taskSummary.totalCores, taskSummary.tasksMax, taskSummary.tasksActive, taskSummary.tasksFailed, taskSummary.tasksComplete, taskSummary.tasksActive + taskSummary.tasksFailed + taskSummary.tasksComplete, taskSummary.duration, taskSummary.jvmGCTime, taskSummary.inputBytes, taskSummary.shuffleRead, taskSummary.shuffleWrite, maxMem, taskSummary.executorLogs ) } }
Example 63
Source File: ExecutorThreadDumpPage.scala From multi-tenancy-spark with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.exec import javax.servlet.http.HttpServletRequest import scala.xml.{Node, Text} import org.apache.spark.ui.{UIUtils, WebUIPage} private[ui] class ExecutorThreadDumpPage(parent: ExecutorsTab) extends WebUIPage("threadDump") { private val sc = parent.sc def render(request: HttpServletRequest): Seq[Node] = { val executorId = Option(request.getParameter("executorId")).map { executorId => UIUtils.decodeURLParameter(executorId) }.getOrElse { throw new IllegalArgumentException(s"Missing executorId parameter") } val time = System.currentTimeMillis() val maybeThreadDump = sc.get.getExecutorThreadDump(executorId) val content = maybeThreadDump.map { threadDump => val dumpRows = threadDump.sortWith { case (threadTrace1, threadTrace2) => val v1 = if (threadTrace1.threadName.contains("Executor task launch")) 1 else 0 val v2 = if (threadTrace2.threadName.contains("Executor task launch")) 1 else 0 if (v1 == v2) { threadTrace1.threadName.toLowerCase < threadTrace2.threadName.toLowerCase } else { v1 > v2 } }.map { thread => val threadId = thread.threadId val blockedBy = thread.blockedByThreadId match { case Some(blockedByThreadId) => <div> Blocked by <a href={s"#${thread.blockedByThreadId}_td_id"}> Thread {thread.blockedByThreadId} {thread.blockedByLock}</a> </div> case None => Text("") } val heldLocks = thread.holdingLocks.mkString(", ") <tr id={s"thread_${threadId}_tr"} class="accordion-heading" onclick={s"toggleThreadStackTrace($threadId, false)"} onmouseover={s"onMouseOverAndOut($threadId)"} onmouseout={s"onMouseOverAndOut($threadId)"}> <td id={s"${threadId}_td_id"}>{threadId}</td> <td id={s"${threadId}_td_name"}>{thread.threadName}</td> <td id={s"${threadId}_td_state"}>{thread.threadState}</td> <td id={s"${threadId}_td_locking"}>{blockedBy}{heldLocks}</td> <td id={s"${threadId}_td_stacktrace"} class="hidden">{thread.stackTrace}</td> </tr> } <div class="row-fluid"> <p>Updated at {UIUtils.formatDate(time)}</p> { // scalastyle:off <p><a class="expandbutton" onClick="expandAllThreadStackTrace(true)"> Expand All </a></p> <p><a class="expandbutton hidden" onClick="collapseAllThreadStackTrace(true)"> Collapse All </a></p> <div class="form-inline"> <div class="bs-example" data-example-id="simple-form-inline"> <div class="form-group"> <div class="input-group"> Search: <input type="text" class="form-control" id="search" oninput="onSearchStringChange()"></input> </div> </div> </div> </div> <p></p> // scalastyle:on } <table class={UIUtils.TABLE_CLASS_STRIPED + " accordion-group" + " sortable"}> <thead> <th onClick="collapseAllThreadStackTrace(false)">Thread ID</th> <th onClick="collapseAllThreadStackTrace(false)">Thread Name</th> <th onClick="collapseAllThreadStackTrace(false)">Thread State</th> <th onClick="collapseAllThreadStackTrace(false)">Thread Locks</th> </thead> <tbody>{dumpRows}</tbody> </table> </div> }.getOrElse(Text("Error fetching thread dump")) UIUtils.headerSparkPage(s"Thread dump for executor $executorId", content, parent) } }
Example 64
Source File: EnvironmentPage.scala From multi-tenancy-spark with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.env import javax.servlet.http.HttpServletRequest import scala.xml.Node import org.apache.spark.ui.{UIUtils, WebUIPage} private[ui] class EnvironmentPage(parent: EnvironmentTab) extends WebUIPage("") { private val listener = parent.listener private def removePass(kv: (String, String)): (String, String) = { if (kv._1.toLowerCase.contains("password") || kv._1.toLowerCase.contains("secret")) { (kv._1, "******") } else kv } def render(request: HttpServletRequest): Seq[Node] = { val runtimeInformationTable = UIUtils.listingTable( propertyHeader, jvmRow, listener.jvmInformation, fixedWidth = true) val sparkPropertiesTable = UIUtils.listingTable( propertyHeader, propertyRow, listener.sparkProperties.map(removePass), fixedWidth = true) val systemPropertiesTable = UIUtils.listingTable( propertyHeader, propertyRow, listener.systemProperties, fixedWidth = true) val classpathEntriesTable = UIUtils.listingTable( classPathHeaders, classPathRow, listener.classpathEntries, fixedWidth = true) val content = <span> <h4>Runtime Information</h4> {runtimeInformationTable} <h4>Spark Properties</h4> {sparkPropertiesTable} <h4>System Properties</h4> {systemPropertiesTable} <h4>Classpath Entries</h4> {classpathEntriesTable} </span> UIUtils.headerSparkPage("Environment", content, parent) } private def propertyHeader = Seq("Name", "Value") private def classPathHeaders = Seq("Resource", "Source") private def jvmRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr> private def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr> private def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr> }
Example 65
Source File: StagesTab.scala From multi-tenancy-spark with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import org.apache.spark.scheduler.SchedulingMode
import org.apache.spark.ui.{SparkUI, SparkUITab}

private[ui] class StagesTab(parent: SparkUI) extends SparkUITab(parent, "stages") {
  val sc = parent.sc
  val conf = parent.conf
  val killEnabled = parent.killEnabled
  val progressListener = parent.jobProgressListener
  val operationGraphListener = parent.operationGraphListener
  val executorsListener = parent.executorsListener

  attachPage(new AllStagesPage(this))
  attachPage(new StagePage(this))
  attachPage(new PoolPage(this))

  def isFairScheduler: Boolean = progressListener.schedulingMode == Some(SchedulingMode.FAIR)

  def handleKillRequest(request: HttpServletRequest): Unit = {
    if (killEnabled && parent.securityManager.checkModifyPermissions(request.getRemoteUser)) {
      val stageId = Option(request.getParameter("id")).map(_.toInt)
      stageId.foreach { id =>
        if (progressListener.activeStages.contains(id)) {
          sc.foreach(_.cancelStage(id))
          // Do a quick pause here to give Spark time to kill the stage so it shows up as
          // killed after the refresh. Note that this will block the serving thread so the
          // time should be limited in duration.
          Thread.sleep(100)
        }
      }
    }
  }
}
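Note that Option(request.getParameter("id")).map(_.toInt) throws NumberFormatException on a malformed id. A defensive variant using Try, as a sketch with a helper name of our own:

import javax.servlet.http.HttpServletRequest

import scala.util.Try

object SafeParams {
  // Malformed or missing values yield None instead of an exception.
  def intParam(request: HttpServletRequest, name: String): Option[Int] =
    Option(request.getParameter(name)).flatMap(s => Try(s.toInt).toOption)
}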
Example 66
Source File: JobsTab.scala From multi-tenancy-spark with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import org.apache.spark.scheduler.SchedulingMode
import org.apache.spark.ui.{SparkUI, SparkUITab}

private[ui] class JobsTab(parent: SparkUI) extends SparkUITab(parent, "jobs") {
  val sc = parent.sc
  val killEnabled = parent.killEnabled
  val jobProgresslistener = parent.jobProgressListener
  val executorListener = parent.executorsListener
  val operationGraphListener = parent.operationGraphListener

  def isFairScheduler: Boolean =
    jobProgresslistener.schedulingMode == Some(SchedulingMode.FAIR)

  def getSparkUser: String = parent.getSparkUser

  attachPage(new AllJobsPage(this))
  attachPage(new JobPage(this))

  def handleKillRequest(request: HttpServletRequest): Unit = {
    if (killEnabled && parent.securityManager.checkModifyPermissions(request.getRemoteUser)) {
      val jobId = Option(request.getParameter("id")).map(_.toInt)
      jobId.foreach { id =>
        if (jobProgresslistener.activeJobs.contains(id)) {
          sc.foreach(_.cancelJob(id))
          // Do a quick pause here to give Spark time to kill the job so it shows up as
          // killed after the refresh. Note that this will block the serving thread so the
          // time should be limited in duration.
          Thread.sleep(100)
        }
      }
    }
  }
}
Example 67
Source File: PoolPage.scala From multi-tenancy-spark with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.jobs import javax.servlet.http.HttpServletRequest import scala.xml.Node import org.apache.spark.scheduler.StageInfo import org.apache.spark.ui.{UIUtils, WebUIPage} private[ui] class PoolPage(parent: StagesTab) extends WebUIPage("pool") { private val sc = parent.sc private val listener = parent.progressListener def render(request: HttpServletRequest): Seq[Node] = { listener.synchronized { val poolName = Option(request.getParameter("poolname")).map { poolname => UIUtils.decodeURLParameter(poolname) }.getOrElse { throw new IllegalArgumentException(s"Missing poolname parameter") } val poolToActiveStages = listener.poolToActiveStages val activeStages = poolToActiveStages.get(poolName) match { case Some(s) => s.values.toSeq case None => Seq[StageInfo]() } val shouldShowActiveStages = activeStages.nonEmpty val activeStagesTable = new StageTableBase(request, activeStages, "", "activeStage", parent.basePath, "stages/pool", parent.progressListener, parent.isFairScheduler, parent.killEnabled, isFailedStage = false, parent.sparkUser) // For now, pool information is only accessible in live UIs val pools = sc.map(_.getPoolForName(poolName).getOrElse { throw new IllegalArgumentException(s"Unknown poolname: $poolName") }).toSeq val poolTable = new PoolTable(pools, parent) var content = <h4>Summary </h4> ++ poolTable.toNodeSeq if (shouldShowActiveStages) { content ++= <h4>{activeStages.size} Active Stages</h4> ++ activeStagesTable.toNodeSeq } UIUtils.headerSparkPage("Fair Scheduler Pool: " + poolName, content, parent) } } }
Example 68
Source File: StagePageSuite.scala From multi-tenancy-spark with Apache License 2.0 | 5 votes |
package org.apache.spark.ui import javax.servlet.http.HttpServletRequest import scala.xml.Node import org.mockito.Mockito.{RETURNS_SMART_NULLS, mock, when} import org.apache.spark._ import org.apache.spark.executor.TaskMetrics import org.apache.spark.scheduler._ import org.apache.spark.storage.StorageStatusListener import org.apache.spark.ui.exec.ExecutorsListener import org.apache.spark.ui.jobs.{JobProgressListener, StagePage, StagesTab} import org.apache.spark.ui.scope.RDDOperationGraphListener import org.apache.spark.util.Utils class StagePageSuite extends SparkFunSuite with LocalSparkContext { private val peakExecutionMemory = 10 test("peak execution memory should displayed") { val conf = new SparkConf(false) val html = renderStagePage(conf).toString().toLowerCase val targetString = "peak execution memory" assert(html.contains(targetString)) } test("SPARK-10543: peak execution memory should be per-task rather than cumulative") { val conf = new SparkConf(false) val html = renderStagePage(conf).toString().toLowerCase // verify min/25/50/75/max show task value not cumulative values assert(html.contains(s"<td>$peakExecutionMemory.0 b</td>" * 5)) } private def renderStagePage(conf: SparkConf): Seq[Node] = { val jobListener = new JobProgressListener(conf, Utils.getCurrentUserName()) val graphListener = new RDDOperationGraphListener(conf) val executorsListener = new ExecutorsListener(new StorageStatusListener(conf), conf) val tab = mock(classOf[StagesTab], RETURNS_SMART_NULLS) val request = mock(classOf[HttpServletRequest]) when(tab.conf).thenReturn(conf) when(tab.progressListener).thenReturn(jobListener) when(tab.operationGraphListener).thenReturn(graphListener) when(tab.executorsListener).thenReturn(executorsListener) when(tab.appName).thenReturn("testing") when(tab.headerTabs).thenReturn(Seq.empty) when(request.getParameter("id")).thenReturn("0") when(request.getParameter("attempt")).thenReturn("0") val page = new StagePage(tab) // Simulate a stage in job progress listener val stageInfo = new StageInfo(0, 0, "dummy", 1, Seq.empty, Seq.empty, "details") // Simulate two tasks to test PEAK_EXECUTION_MEMORY correctness (1 to 2).foreach { taskId => val taskInfo = new TaskInfo(taskId, taskId, 0, 0, "0", "localhost", TaskLocality.ANY, false) jobListener.onStageSubmitted(SparkListenerStageSubmitted(stageInfo)) jobListener.onTaskStart(SparkListenerTaskStart(0, 0, taskInfo)) taskInfo.markFinished(TaskState.FINISHED) val taskMetrics = TaskMetrics.empty taskMetrics.incPeakExecutionMemory(peakExecutionMemory) jobListener.onTaskEnd( SparkListenerTaskEnd(0, 0, "result", Success, taskInfo, taskMetrics)) } jobListener.onStageCompleted(SparkListenerStageCompleted(stageInfo)) page.render(request) } }
Example 69
Source File: MetricsServlet.scala From iolap with Apache License 2.0 | 5 votes |
package org.apache.spark.metrics.sink

import java.util.Properties
import java.util.concurrent.TimeUnit
import javax.servlet.http.HttpServletRequest

import com.codahale.metrics.MetricRegistry
import com.codahale.metrics.json.MetricsModule
import com.fasterxml.jackson.databind.ObjectMapper
import org.eclipse.jetty.servlet.ServletContextHandler

import org.apache.spark.SecurityManager
import org.apache.spark.ui.JettyUtils._

private[spark] class MetricsServlet(
    val property: Properties,
    val registry: MetricRegistry,
    securityMgr: SecurityManager) extends Sink {

  val SERVLET_KEY_PATH = "path"
  val SERVLET_KEY_SAMPLE = "sample"

  val SERVLET_DEFAULT_SAMPLE = false

  val servletPath = property.getProperty(SERVLET_KEY_PATH)

  val servletShowSample = Option(property.getProperty(SERVLET_KEY_SAMPLE)).map(_.toBoolean)
    .getOrElse(SERVLET_DEFAULT_SAMPLE)

  val mapper = new ObjectMapper().registerModule(
    new MetricsModule(TimeUnit.SECONDS, TimeUnit.MILLISECONDS, servletShowSample))

  def getHandlers: Array[ServletContextHandler] = {
    Array[ServletContextHandler](
      createServletHandler(servletPath,
        new ServletParams(request => getMetricsSnapshot(request), "text/json"), securityMgr)
    )
  }

  def getMetricsSnapshot(request: HttpServletRequest): String = {
    mapper.writeValueAsString(registry)
  }

  override def start() { }

  override def stop() { }

  override def report() { }
}
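A servlet sink is not constructed directly; Spark's metrics system instantiates it from metrics.properties and hands it the matching Properties object. A minimal sketch of the Properties this class reads, assuming the standard *.sink.servlet.* configuration keys from Spark's metrics.properties template:

import java.util.Properties

val props = new Properties()
props.setProperty("path", "/metrics/json") // consumed via SERVLET_KEY_PATH
props.setProperty("sample", "false")       // consumed via SERVLET_KEY_SAMPLE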
Example 70
Source File: MesosClusterPage.scala From iolap with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.mesos.ui

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.mesos.Protos.TaskStatus

import org.apache.spark.deploy.mesos.MesosDriverDescription
import org.apache.spark.scheduler.cluster.mesos.MesosClusterSubmissionState
import org.apache.spark.ui.{UIUtils, WebUIPage}

private[mesos] class MesosClusterPage(parent: MesosClusterUI) extends WebUIPage("") {
  def render(request: HttpServletRequest): Seq[Node] = {
    val state = parent.scheduler.getSchedulerState()
    val queuedHeaders = Seq("Driver ID", "Submit Date", "Main Class", "Driver Resources")
    val driverHeaders = queuedHeaders ++ Seq("Start Date", "Mesos Slave ID", "State")
    val retryHeaders = Seq("Driver ID", "Submit Date", "Description") ++
      Seq("Last Failed Status", "Next Retry Time", "Attempt Count")
    val queuedTable = UIUtils.listingTable(queuedHeaders, queuedRow, state.queuedDrivers)
    val launchedTable = UIUtils.listingTable(driverHeaders, driverRow, state.launchedDrivers)
    val finishedTable = UIUtils.listingTable(driverHeaders, driverRow, state.finishedDrivers)
    val retryTable = UIUtils.listingTable(retryHeaders, retryRow, state.pendingRetryDrivers)
    val content =
      <p>Mesos Framework ID: {state.frameworkId}</p>
      <div class="row-fluid">
        <div class="span12">
          <h4>Queued Drivers:</h4>
          {queuedTable}
          <h4>Launched Drivers:</h4>
          {launchedTable}
          <h4>Finished Drivers:</h4>
          {finishedTable}
          <h4>Supervise drivers waiting for retry:</h4>
          {retryTable}
        </div>
      </div>;
    UIUtils.basicSparkPage(content, "Spark Drivers for Mesos cluster")
  }

  private def queuedRow(submission: MesosDriverDescription): Seq[Node] = {
    val id = submission.submissionId
    <tr>
      <td><a href={s"driver?id=$id"}>{id}</a></td>
      <td>{submission.submissionDate}</td>
      <td>{submission.command.mainClass}</td>
      <td>cpus: {submission.cores}, mem: {submission.mem}</td>
    </tr>
  }

  private def driverRow(state: MesosClusterSubmissionState): Seq[Node] = {
    val id = state.driverDescription.submissionId
    <tr>
      <td><a href={s"driver?id=$id"}>{id}</a></td>
      <td>{state.driverDescription.submissionDate}</td>
      <td>{state.driverDescription.command.mainClass}</td>
      <td>cpus: {state.driverDescription.cores}, mem: {state.driverDescription.mem}</td>
      <td>{state.startDate}</td>
      <td>{state.slaveId.getValue}</td>
      <td>{stateString(state.mesosTaskStatus)}</td>
    </tr>
  }

  private def retryRow(submission: MesosDriverDescription): Seq[Node] = {
    val id = submission.submissionId
    <tr>
      <td><a href={s"driver?id=$id"}>{id}</a></td>
      <td>{submission.submissionDate}</td>
      <td>{submission.command.mainClass}</td>
      <td>{submission.retryState.get.lastFailureStatus}</td>
      <td>{submission.retryState.get.nextRetry}</td>
      <td>{submission.retryState.get.retries}</td>
    </tr>
  }

  private def stateString(status: Option[TaskStatus]): String = {
    if (status.isEmpty) {
      return ""
    }
    val sb = new StringBuilder
    val s = status.get
    sb.append(s"State: ${s.getState}")
    if (status.get.hasMessage) {
      sb.append(s", Message: ${s.getMessage}")
    }
    if (status.get.hasHealthy) {
      sb.append(s", Healthy: ${s.getHealthy}")
    }
    if (status.get.hasSource) {
      sb.append(s", Source: ${s.getSource}")
    }
    if (status.get.hasReason) {
      sb.append(s", Reason: ${s.getReason}")
    }
    if (status.get.hasTimestamp) {
      sb.append(s", Time: ${s.getTimestamp}")
    }
    sb.toString()
  }
}
Example 71
Source File: HistoryNotFoundPage.scala From iolap with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.master.ui

import java.net.URLDecoder
import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class HistoryNotFoundPage(parent: MasterWebUI)
  extends WebUIPage("history/not-found") {

  def render(request: HttpServletRequest): Seq[Node] = {
    val titleParam = request.getParameter("title")
    val msgParam = request.getParameter("msg")
    val exceptionParam = request.getParameter("exception")

    // If no parameters are specified, assume the user did not enable event logging
    val defaultTitle = "Event logging is not enabled"
    val defaultContent =
      <div class="row-fluid">
        <div class="span12" style="font-size:14px">
          No event logs were found for this application! To
          <a href="http://spark.apache.org/docs/latest/monitoring.html">enable event logging</a>,
          set <span style="font-style:italic">spark.eventLog.enabled</span> to true and
          <span style="font-style:italic">spark.eventLog.dir</span> to the directory to which your
          event logs are written.
        </div>
      </div>

    val title = Option(titleParam).getOrElse(defaultTitle)
    val content = Option(msgParam)
      .map { msg => URLDecoder.decode(msg, "UTF-8") }
      .map { msg =>
        <div class="row-fluid">
          <div class="span12" style="font-size:14px">{msg}</div>
        </div> ++
        Option(exceptionParam)
          .map { e => URLDecoder.decode(e, "UTF-8") }
          .map { e => <pre>{e}</pre> }
          .getOrElse(Seq.empty)
      }.getOrElse(defaultContent)

    UIUtils.basicSparkPage(content, title)
  }
}
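Because render URL-decodes the msg and exception parameters, whatever redirects to history/not-found must encode them first. A small sketch (the message text and link target are made up for illustration):

import java.net.URLEncoder

val msg = URLEncoder.encode("No event logs found for app-20150101000000-0000", "UTF-8")
val link = s"/history/not-found/?msg=$msg" // hypothetical redirect target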
Example 72
Source File: WorkerWebUI.scala From iolap with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.worker.ui

import java.io.File
import javax.servlet.http.HttpServletRequest

import org.apache.spark.{Logging, SparkConf}
import org.apache.spark.deploy.worker.Worker
import org.apache.spark.deploy.worker.ui.WorkerWebUI._
import org.apache.spark.ui.{SparkUI, WebUI}
import org.apache.spark.ui.JettyUtils._
import org.apache.spark.util.RpcUtils

  def initialize() {
    val logPage = new LogPage(this)
    attachPage(logPage)
    attachPage(new WorkerPage(this))
    attachHandler(createStaticHandler(WorkerWebUI.STATIC_RESOURCE_BASE, "/static"))
    attachHandler(createServletHandler("/log",
      (request: HttpServletRequest) => logPage.renderLog(request), worker.securityMgr))
  }
}

private[ui] object WorkerWebUI {
  val STATIC_RESOURCE_BASE = SparkUI.STATIC_RESOURCE_DIR
}
Example 73
Source File: ExecutorThreadDumpPage.scala From iolap with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.exec

import java.net.URLDecoder
import javax.servlet.http.HttpServletRequest

import scala.util.Try
import scala.xml.{Text, Node}

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class ExecutorThreadDumpPage(parent: ExecutorsTab) extends WebUIPage("threadDump") {

  private val sc = parent.sc

  def render(request: HttpServletRequest): Seq[Node] = {
    val executorId = Option(request.getParameter("executorId")).map { executorId =>
      // Due to YARN-2844, "<driver>" in the url will be encoded to "%25253Cdriver%25253E" when
      // running in yarn-cluster mode. `request.getParameter("executorId")` will return
      // "%253Cdriver%253E". Therefore we need to decode it until we get the real id.
      var id = executorId
      var decodedId = URLDecoder.decode(id, "UTF-8")
      while (id != decodedId) {
        id = decodedId
        decodedId = URLDecoder.decode(id, "UTF-8")
      }
      id
    }.getOrElse {
      throw new IllegalArgumentException(s"Missing executorId parameter")
    }
    val time = System.currentTimeMillis()
    val maybeThreadDump = sc.get.getExecutorThreadDump(executorId)

    val content = maybeThreadDump.map { threadDump =>
      val dumpRows = threadDump.map { thread =>
        <div class="accordion-group">
          <div class="accordion-heading" onclick="$(this).next().toggleClass('hidden')">
            <a class="accordion-toggle">
              Thread {thread.threadId}: {thread.threadName} ({thread.threadState})
            </a>
          </div>
          <div class="accordion-body hidden">
            <div class="accordion-inner">
              <pre>{thread.stackTrace}</pre>
            </div>
          </div>
        </div>
      }

      <div class="row-fluid">
        <p>Updated at {UIUtils.formatDate(time)}</p>
        {
          // scalastyle:off
          <p><a class="expandbutton"
                onClick="$('.accordion-body').removeClass('hidden'); $('.expandbutton').toggleClass('hidden')">
            Expand All
          </a></p>
          <p><a class="expandbutton hidden"
                onClick="$('.accordion-body').addClass('hidden'); $('.expandbutton').toggleClass('hidden')">
            Collapse All
          </a></p>
          // scalastyle:on
        }
        <div class="accordion">{dumpRows}</div>
      </div>
    }.getOrElse(Text("Error fetching thread dump"))
    UIUtils.headerSparkPage(s"Thread dump for executor $executorId", content, parent)
  }
}
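The while loop above is a decode-until-fixpoint idiom: it keeps applying URLDecoder until the string stops changing, which undoes any number of nested URL encodings. The same idea as a standalone helper (a sketch, not part of the original file):

import java.net.URLDecoder

def fullyDecode(s: String): String = {
  val decoded = URLDecoder.decode(s, "UTF-8")
  if (decoded == s) s else fullyDecode(decoded)
}

// fullyDecode("%25253Cdriver%25253E") == "<driver>" after three passes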
Example 74
Source File: EnvironmentPage.scala From iolap with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.env

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class EnvironmentPage(parent: EnvironmentTab) extends WebUIPage("") {
  private val listener = parent.listener

  def render(request: HttpServletRequest): Seq[Node] = {
    val runtimeInformationTable = UIUtils.listingTable(
      propertyHeader, jvmRow, listener.jvmInformation, fixedWidth = true)
    val sparkPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.sparkProperties, fixedWidth = true)
    val systemPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.systemProperties, fixedWidth = true)
    val classpathEntriesTable = UIUtils.listingTable(
      classPathHeaders, classPathRow, listener.classpathEntries, fixedWidth = true)
    val content =
      <span>
        <h4>Runtime Information</h4> {runtimeInformationTable}
        <h4>Spark Properties</h4> {sparkPropertiesTable}
        <h4>System Properties</h4> {systemPropertiesTable}
        <h4>Classpath Entries</h4> {classpathEntriesTable}
      </span>

    UIUtils.headerSparkPage("Environment", content, parent)
  }

  private def propertyHeader = Seq("Name", "Value")
  private def classPathHeaders = Seq("Resource", "Source")
  private def jvmRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr>
}
Example 75
Source File: StoragePage.scala From iolap with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.storage

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.storage.RDDInfo
import org.apache.spark.ui.{UIUtils, WebUIPage}
import org.apache.spark.util.Utils

  private def rddRow(rdd: RDDInfo): Seq[Node] = {
    // scalastyle:off
    <tr>
      <td>
        <a href={"%s/storage/rdd?id=%s".format(UIUtils.prependBaseUri(parent.basePath), rdd.id)}>
          {rdd.name}
        </a>
      </td>
      <td>{rdd.storageLevel.description}
      </td>
      <td>{rdd.numCachedPartitions}</td>
      <td>{"%.0f%%".format(rdd.numCachedPartitions * 100.0 / rdd.numPartitions)}</td>
      <td sorttable_customkey={rdd.memSize.toString}>{Utils.bytesToString(rdd.memSize)}</td>
      <td sorttable_customkey={rdd.externalBlockStoreSize.toString}>{Utils.bytesToString(rdd.externalBlockStoreSize)}</td>
      <td sorttable_customkey={rdd.diskSize.toString}>{Utils.bytesToString(rdd.diskSize)}</td>
    </tr>
    // scalastyle:on
  }
}
Example 76
Source File: StagesTab.scala From iolap with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import org.apache.spark.scheduler.SchedulingMode
import org.apache.spark.ui.{SparkUI, SparkUITab}

private[ui] class StagesTab(parent: SparkUI) extends SparkUITab(parent, "stages") {
  val sc = parent.sc
  val conf = parent.conf
  val killEnabled = parent.killEnabled
  val progressListener = parent.jobProgressListener
  val operationGraphListener = parent.operationGraphListener

  attachPage(new AllStagesPage(this))
  attachPage(new StagePage(this))
  attachPage(new PoolPage(this))

  def isFairScheduler: Boolean = progressListener.schedulingMode.exists(_ == SchedulingMode.FAIR)

  def handleKillRequest(request: HttpServletRequest): Unit = {
    if (killEnabled && parent.securityManager.checkModifyPermissions(request.getRemoteUser)) {
      val killFlag = Option(request.getParameter("terminate")).getOrElse("false").toBoolean
      val stageId = Option(request.getParameter("id")).getOrElse("-1").toInt
      if (stageId >= 0 && killFlag && progressListener.activeStages.contains(stageId)) {
        sc.get.cancelStage(stageId)
      }
      // Do a quick pause here to give Spark time to kill the stage so it shows up as
      // killed after the refresh. Note that this will block the serving thread so the
      // time should be limited in duration.
      Thread.sleep(100)
    }
  }
}
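Since both the kill flag and the stage id arrive as request parameters, handleKillRequest can be exercised with a mocked request, in the same style as the test suites later on this page. A minimal sketch (the user name and ids are hypothetical, and stagesTab is assumed to be a StagesTab instance in scope):

import javax.servlet.http.HttpServletRequest
import org.mockito.Mockito.{mock, when}

val request = mock(classOf[HttpServletRequest])
when(request.getRemoteUser).thenReturn("spark")            // hypothetical user
when(request.getParameter("terminate")).thenReturn("true") // kill flag
when(request.getParameter("id")).thenReturn("3")           // hypothetical stage id
// stagesTab.handleKillRequest(request)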
Example 77
Source File: PoolPage.scala From iolap with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.scheduler.StageInfo
import org.apache.spark.ui.{WebUIPage, UIUtils}

private[ui] class PoolPage(parent: StagesTab) extends WebUIPage("pool") {
  private val sc = parent.sc
  private val listener = parent.progressListener

  def render(request: HttpServletRequest): Seq[Node] = {
    listener.synchronized {
      val poolName = request.getParameter("poolname")
      require(poolName != null && poolName.nonEmpty, "Missing poolname parameter")

      val poolToActiveStages = listener.poolToActiveStages
      val activeStages = poolToActiveStages.get(poolName) match {
        case Some(s) => s.values.toSeq
        case None => Seq[StageInfo]()
      }
      val activeStagesTable = new StageTableBase(activeStages.sortBy(_.submissionTime).reverse,
        parent.basePath, parent.progressListener, isFairScheduler = parent.isFairScheduler,
        killEnabled = parent.killEnabled)

      // For now, pool information is only accessible in live UIs
      val pools = sc.map(_.getPoolForName(poolName).get).toSeq
      val poolTable = new PoolTable(pools, parent)

      val content =
        <h4>Summary </h4> ++ poolTable.toNodeSeq ++
        <h4>{activeStages.size} Active Stages</h4> ++ activeStagesTable.toNodeSeq

      UIUtils.headerSparkPage("Fair Scheduler Pool: " + poolName, content, parent)
    }
  }
}
Example 78
Source File: KyuubiSessionSubPageSuite.scala From kyuubi with Apache License 2.0 | 5 votes |
package org.apache.spark.ui

import javax.servlet.http.HttpServletRequest

import scala.util.Try

import org.apache.spark.{KyuubiSparkUtil, SparkConf, SparkContext, SparkFunSuite}
import org.mockito.Mockito.when
import org.scalatest.mock.MockitoSugar

import yaooqinn.kyuubi.ui.{ExecutionInfo, KyuubiServerListener, SessionInfo}

class KyuubiSessionSubPageSuite extends SparkFunSuite with MockitoSugar {

  var sc: SparkContext = _
  var user: String = _
  var tab: KyuubiSessionTab = _

  override def beforeAll(): Unit = {
    val conf = new SparkConf(loadDefaults = true).setMaster("local").setAppName("test")
    sc = new SparkContext(conf)
    user = KyuubiSparkUtil.getCurrentUserName
    tab = new KyuubiSessionTab(user, sc)
  }

  override def afterAll(): Unit = {
    sc.stop()
  }

  test("render kyuubi session page") {
    val page = new KyuubiSessionSubPage(tab)
    val request = mock[HttpServletRequest]
    intercept[IllegalArgumentException](page.render(request))

    val id = "id1"
    when(request.getParameter("id")).thenReturn(id)
    intercept[IllegalArgumentException](page.render(request))

    val sessionInfo = mock[SessionInfo]
    val tab1 = mock[KyuubiSessionTab]
    when(request.getParameter("id")).thenReturn(id)
    val listener = mock[KyuubiServerListener]
    when(tab1.listener).thenReturn(listener)
    when(listener.getSession(id)).thenReturn(Some(sessionInfo))
    when(sessionInfo.sessionId).thenReturn("1")
    when(listener.getExecutionList).thenReturn(Seq[ExecutionInfo]())
    when(tab1.appName).thenReturn("name")
    when(tab1.headerTabs).thenReturn(Seq[WebUITab]())
    val page2 = new KyuubiSessionSubPage(tab1)
    assert(Try { page2.render(request) }.isSuccess)
  }
}
Example 79
Source File: KyuubiSessionPageSuite.scala From kyuubi with Apache License 2.0 | 5 votes |
package org.apache.spark.ui

import javax.servlet.http.HttpServletRequest

import scala.util.Try

import org.apache.spark.{KyuubiSparkUtil, SparkConf, SparkContext, SparkFunSuite}
import org.scalatest.mock.MockitoSugar

class KyuubiSessionPageSuite extends SparkFunSuite with MockitoSugar {

  var sc: SparkContext = _
  var user: String = _
  var tab: KyuubiSessionTab = _

  override def beforeAll(): Unit = {
    val conf = new SparkConf(loadDefaults = true).setMaster("local").setAppName("test")
    sc = new SparkContext(conf)
    user = KyuubiSparkUtil.getCurrentUserName
    tab = new KyuubiSessionTab(user, sc)
  }

  override def afterAll(): Unit = {
    sc.stop()
  }

  test("render kyuubi session page") {
    val page = new KyuubiSessionPage(tab)
    val request = mock[HttpServletRequest]
    assert(Try { page.render(request) }.isSuccess)
  }
}
Example 80
Source File: MetricsServlet.scala From spark1.52 with Apache License 2.0 | 5 votes |
package org.apache.spark.metrics.sink

import java.util.Properties
import java.util.concurrent.TimeUnit
import javax.servlet.http.HttpServletRequest

import com.codahale.metrics.MetricRegistry
import com.codahale.metrics.json.MetricsModule
import com.fasterxml.jackson.databind.ObjectMapper
import org.eclipse.jetty.servlet.ServletContextHandler

import org.apache.spark.SecurityManager
import org.apache.spark.ui.JettyUtils._

private[spark] class MetricsServlet(
    val property: Properties,
    val registry: MetricRegistry,
    securityMgr: SecurityManager) extends Sink {

  val SERVLET_KEY_PATH = "path"
  val SERVLET_KEY_SAMPLE = "sample"

  val SERVLET_DEFAULT_SAMPLE = false

  val servletPath = property.getProperty(SERVLET_KEY_PATH)

  val servletShowSample = Option(property.getProperty(SERVLET_KEY_SAMPLE)).map(_.toBoolean)
    .getOrElse(SERVLET_DEFAULT_SAMPLE)

  val mapper = new ObjectMapper().registerModule(
    new MetricsModule(TimeUnit.SECONDS, TimeUnit.MILLISECONDS, servletShowSample))

  // Builds the ServletContextHandler that serves /metrics/json requests; the actual
  // handling is done by getMetricsSnapshot, which serializes the registry with Jackson.
  def getHandlers: Array[ServletContextHandler] = {
    Array[ServletContextHandler](
      createServletHandler(servletPath,
        new ServletParams(request => getMetricsSnapshot(request), "text/json"), securityMgr)
    )
  }

  def getMetricsSnapshot(request: HttpServletRequest): String = {
    mapper.writeValueAsString(registry)
  }

  override def start() { }

  override def stop() { }

  override def report() { }
}
Example 81
Source File: MesosClusterPage.scala From spark1.52 with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.mesos.ui

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.mesos.Protos.TaskStatus

import org.apache.spark.deploy.mesos.MesosDriverDescription
import org.apache.spark.scheduler.cluster.mesos.MesosClusterSubmissionState
import org.apache.spark.ui.{UIUtils, WebUIPage}

private[mesos] class MesosClusterPage(parent: MesosClusterUI) extends WebUIPage("") {
  def render(request: HttpServletRequest): Seq[Node] = {
    val state = parent.scheduler.getSchedulerState()
    val queuedHeaders = Seq("Driver ID", "Submit Date", "Main Class", "Driver Resources")
    val driverHeaders = queuedHeaders ++ Seq("Start Date", "Mesos Slave ID", "State")
    val retryHeaders = Seq("Driver ID", "Submit Date", "Description") ++
      Seq("Last Failed Status", "Next Retry Time", "Attempt Count")
    val queuedTable = UIUtils.listingTable(queuedHeaders, queuedRow, state.queuedDrivers)
    val launchedTable = UIUtils.listingTable(driverHeaders, driverRow, state.launchedDrivers)
    val finishedTable = UIUtils.listingTable(driverHeaders, driverRow, state.finishedDrivers)
    val retryTable = UIUtils.listingTable(retryHeaders, retryRow, state.pendingRetryDrivers)
    val content =
      <p>Mesos Framework ID: {state.frameworkId}</p>
      <div class="row-fluid">
        <div class="span12">
          <h4>Queued Drivers:</h4>
          {queuedTable}
          <h4>Launched Drivers:</h4>
          {launchedTable}
          <h4>Finished Drivers:</h4>
          {finishedTable}
          <h4>Supervise drivers waiting for retry:</h4>
          {retryTable}
        </div>
      </div>;
    UIUtils.basicSparkPage(content, "Spark Drivers for Mesos cluster")
  }

  private def queuedRow(submission: MesosDriverDescription): Seq[Node] = {
    val id = submission.submissionId
    <tr>
      <td><a href={s"driver?id=$id"}>{id}</a></td>
      <td>{submission.submissionDate}</td>
      <td>{submission.command.mainClass}</td>
      <td>cpus: {submission.cores}, mem: {submission.mem}</td>
    </tr>
  }

  private def driverRow(state: MesosClusterSubmissionState): Seq[Node] = {
    val id = state.driverDescription.submissionId
    <tr>
      <td><a href={s"driver?id=$id"}>{id}</a></td>
      <td>{state.driverDescription.submissionDate}</td>
      <td>{state.driverDescription.command.mainClass}</td>
      <td>cpus: {state.driverDescription.cores}, mem: {state.driverDescription.mem}</td>
      <td>{state.startDate}</td>
      <td>{state.slaveId.getValue}</td>
      <td>{stateString(state.mesosTaskStatus)}</td>
    </tr>
  }

  private def retryRow(submission: MesosDriverDescription): Seq[Node] = {
    val id = submission.submissionId
    <tr>
      <td><a href={s"driver?id=$id"}>{id}</a></td>
      <td>{submission.submissionDate}</td>
      <td>{submission.command.mainClass}</td>
      <td>{submission.retryState.get.lastFailureStatus}</td>
      <td>{submission.retryState.get.nextRetry}</td>
      <td>{submission.retryState.get.retries}</td>
    </tr>
  }

  private def stateString(status: Option[TaskStatus]): String = {
    if (status.isEmpty) {
      return ""
    }
    val sb = new StringBuilder
    val s = status.get
    sb.append(s"State: ${s.getState}")
    if (status.get.hasMessage) {
      sb.append(s", Message: ${s.getMessage}")
    }
    if (status.get.hasHealthy) {
      sb.append(s", Healthy: ${s.getHealthy}")
    }
    if (status.get.hasSource) {
      sb.append(s", Source: ${s.getSource}")
    }
    if (status.get.hasReason) {
      sb.append(s", Reason: ${s.getReason}")
    }
    if (status.get.hasTimestamp) {
      sb.append(s", Time: ${s.getTimestamp}")
    }
    sb.toString()
  }
}
Example 82
Source File: HistoryNotFoundPage.scala From spark1.52 with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.master.ui

import java.net.URLDecoder
import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class HistoryNotFoundPage(parent: MasterWebUI)
  extends WebUIPage("history/not-found") {

  def render(request: HttpServletRequest): Seq[Node] = {
    val titleParam = request.getParameter("title")
    val msgParam = request.getParameter("msg")
    val exceptionParam = request.getParameter("exception")

    // If no parameters are specified, assume the user did not enable event logging
    val defaultTitle = "Event logging is not enabled"
    val defaultContent =
      <div class="row-fluid">
        <div class="span12" style="font-size:14px">
          No event logs were found for this application! To
          <a href="http://spark.apache.org/docs/latest/monitoring.html">enable event logging</a>,
          set <span style="font-style:italic">spark.eventLog.enabled</span> to true and
          <span style="font-style:italic">spark.eventLog.dir</span> to the directory to which your
          event logs are written.
        </div>
      </div>

    val title = Option(titleParam).getOrElse(defaultTitle)
    val content = Option(msgParam)
      .map { msg => URLDecoder.decode(msg, "UTF-8") }
      .map { msg =>
        <div class="row-fluid">
          <div class="span12" style="font-size:14px">{msg}</div>
        </div> ++
        Option(exceptionParam)
          .map { e => URLDecoder.decode(e, "UTF-8") }
          .map { e => <pre>{e}</pre> }
          .getOrElse(Seq.empty)
      }.getOrElse(defaultContent)

    UIUtils.basicSparkPage(content, title)
  }
}
Example 83
Source File: ApplicationPage.scala From spark1.52 with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.master.ui

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.deploy.ExecutorState
import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
import org.apache.spark.deploy.master.ExecutorDesc
import org.apache.spark.ui.{UIUtils, WebUIPage}
import org.apache.spark.util.Utils

private[ui] class ApplicationPage(parent: MasterWebUI) extends WebUIPage("app") {

  private val master = parent.masterEndpointRef

  def render(request: HttpServletRequest): Seq[Node] = {
    val appId = request.getParameter("appId")
    val state = master.askWithRetry[MasterStateResponse](RequestMasterState)
    val app = state.activeApps.find(_.id == appId).getOrElse({
      state.completedApps.find(_.id == appId).getOrElse(null)
    })
    if (app == null) {
      val msg = <div class="row-fluid">No running application with ID {appId}</div>
      return UIUtils.basicSparkPage(msg, "Not Found")
    }

    val executorHeaders = Seq("ExecutorID", "Worker", "Cores", "Memory", "State", "Logs")
    val allExecutors = (app.executors.values ++ app.removedExecutors).toSet.toSeq
    // This includes executors that are either still running or have exited cleanly
    val executors = allExecutors.filter { exec =>
      !ExecutorState.isFinished(exec.state) || exec.state == ExecutorState.EXITED
    }
    val removedExecutors = allExecutors.diff(executors)
    val executorsTable = UIUtils.listingTable(executorHeaders, executorRow, executors)
    val removedExecutorsTable = UIUtils.listingTable(executorHeaders, executorRow, removedExecutors)

    val content =
      <div class="row-fluid">
        <div class="span12">
          <ul class="unstyled">
            <li><strong>ID:</strong> {app.id}</li>
            <li><strong>Name:</strong> {app.desc.name}</li>
            <li><strong>User:</strong> {app.desc.user}</li>
            <li><strong>Cores:</strong>
              {
                if (app.desc.maxCores.isEmpty) {
                  "Unlimited (%s granted)".format(app.coresGranted)
                } else {
                  "%s (%s granted, %s left)".format(
                    app.desc.maxCores.get, app.coresGranted, app.coresLeft)
                }
              }
            </li>
            <li>
              <strong>Executor Memory:</strong>
              {Utils.megabytesToString(app.desc.memoryPerExecutorMB)}
            </li>
            <li><strong>Submit Date:</strong> {app.submitDate}</li>
            <li><strong>State:</strong> {app.state}</li>
            <li><strong><a href={app.desc.appUiUrl}>Application Detail UI</a></strong></li>
          </ul>
        </div>
      </div>
      <div class="row-fluid"> <!-- Executors -->
        <div class="span12">
          <h4> Executor Summary </h4>
          {executorsTable}
          {
            if (removedExecutors.nonEmpty) {
              <h4> Removed Executors </h4> ++
              removedExecutorsTable
            }
          }
        </div>
      </div>;

    UIUtils.basicSparkPage(content, "Application: " + app.desc.name)
  }

  private def executorRow(executor: ExecutorDesc): Seq[Node] = {
    <tr>
      <td>{executor.id}</td>
      <td>
        <a href={executor.worker.webUiAddress}>{executor.worker.id}</a>
      </td>
      <td>{executor.cores}</td>
      <td>{executor.memory}</td>
      <td>{executor.state}</td>
      <td>
        <a href={"%s/logPage?appId=%s&executorId=%s&logType=stdout"
          .format(executor.worker.webUiAddress, executor.application.id, executor.id)}>stdout</a>
        <a href={"%s/logPage?appId=%s&executorId=%s&logType=stderr"
          .format(executor.worker.webUiAddress, executor.application.id, executor.id)}>stderr</a>
      </td>
    </tr>
  }
}
Example 84
Source File: WorkerWebUI.scala From spark1.52 with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.worker.ui

import java.io.File
import javax.servlet.http.HttpServletRequest

import org.apache.spark.{Logging, SparkConf}
import org.apache.spark.deploy.worker.Worker
import org.apache.spark.deploy.worker.ui.WorkerWebUI._
import org.apache.spark.ui.{SparkUI, WebUI}
import org.apache.spark.ui.JettyUtils._
import org.apache.spark.util.RpcUtils

  def initialize() {
    val logPage = new LogPage(this)
    attachPage(logPage)
    attachPage(new WorkerPage(this))
    attachHandler(createStaticHandler(WorkerWebUI.STATIC_RESOURCE_BASE, "/static"))
    attachHandler(createServletHandler("/log",
      (request: HttpServletRequest) => logPage.renderLog(request), worker.securityMgr))
  }
}

private[worker] object WorkerWebUI {
  val STATIC_RESOURCE_BASE = SparkUI.STATIC_RESOURCE_DIR
  val DEFAULT_RETAINED_DRIVERS = 1000
  val DEFAULT_RETAINED_EXECUTORS = 1000
}
Example 85
Source File: ExecutorThreadDumpPage.scala From spark1.52 with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.exec

import java.net.URLDecoder
import javax.servlet.http.HttpServletRequest

import scala.util.Try
import scala.xml.{Text, Node}

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class ExecutorThreadDumpPage(parent: ExecutorsTab) extends WebUIPage("threadDump") {

  private val sc = parent.sc

  def render(request: HttpServletRequest): Seq[Node] = {
    val executorId = Option(request.getParameter("executorId")).map { executorId =>
      // Due to YARN-2844, "<driver>" in the url will be encoded to "%25253Cdriver%25253E" when
      // running in yarn-cluster mode. `request.getParameter("executorId")` will return
      // "%253Cdriver%253E". Therefore we need to decode it until we get the real id.
      var id = executorId
      var decodedId = URLDecoder.decode(id, "UTF-8")
      while (id != decodedId) {
        id = decodedId
        decodedId = URLDecoder.decode(id, "UTF-8")
      }
      id
    }.getOrElse {
      throw new IllegalArgumentException(s"Missing executorId parameter")
    }
    val time = System.currentTimeMillis()
    val maybeThreadDump = sc.get.getExecutorThreadDump(executorId)

    val content = maybeThreadDump.map { threadDump =>
      val dumpRows = threadDump.sortWith {
        case (threadTrace1, threadTrace2) => {
          val v1 = if (threadTrace1.threadName.contains("Executor task launch")) 1 else 0
          val v2 = if (threadTrace2.threadName.contains("Executor task launch")) 1 else 0
          if (v1 == v2) {
            threadTrace1.threadName.toLowerCase < threadTrace2.threadName.toLowerCase
          } else {
            v1 > v2
          }
        }
      }.map { thread =>
        val threadName = thread.threadName
        val className = "accordion-heading " + {
          if (threadName.contains("Executor task launch")) {
            "executor-thread"
          } else {
            "non-executor-thread"
          }
        }
        <div class="accordion-group">
          <div class={className} onclick="$(this).next().toggleClass('hidden')">
            <a class="accordion-toggle">
              Thread {thread.threadId}: {threadName} ({thread.threadState})
            </a>
          </div>
          <div class="accordion-body hidden">
            <div class="accordion-inner">
              <pre>{thread.stackTrace}</pre>
            </div>
          </div>
        </div>
      }

      <div class="row-fluid">
        <p>Updated at {UIUtils.formatDate(time)}</p>
        {
          // scalastyle:off
          <p><a class="expandbutton"
                onClick="$('.accordion-body').removeClass('hidden'); $('.expandbutton').toggleClass('hidden')">
            Expand All
          </a></p>
          <p><a class="expandbutton hidden"
                onClick="$('.accordion-body').addClass('hidden'); $('.expandbutton').toggleClass('hidden')">
            Collapse All
          </a></p>
          // scalastyle:on
        }
        <div class="accordion">{dumpRows}</div>
      </div>
    }.getOrElse(Text("Error fetching thread dump"))
    UIUtils.headerSparkPage(s"Thread dump for executor $executorId", content, parent)
  }
}
Example 86
Source File: EnvironmentPage.scala From spark1.52 with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.env

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class EnvironmentPage(parent: EnvironmentTab) extends WebUIPage("") {
  private val listener = parent.listener

  def render(request: HttpServletRequest): Seq[Node] = {
    val runtimeInformationTable = UIUtils.listingTable(
      propertyHeader, jvmRow, listener.jvmInformation, fixedWidth = true)
    val sparkPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.sparkProperties, fixedWidth = true)
    val systemPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.systemProperties, fixedWidth = true)
    val classpathEntriesTable = UIUtils.listingTable(
      classPathHeaders, classPathRow, listener.classpathEntries, fixedWidth = true)
    val content =
      <span>
        <h4>Runtime Information</h4> {runtimeInformationTable}
        <h4>Spark Properties</h4> {sparkPropertiesTable}
        <h4>System Properties</h4> {systemPropertiesTable}
        <h4>Classpath Entries</h4> {classpathEntriesTable}
      </span>

    UIUtils.headerSparkPage("Environment", content, parent)
  }

  private def propertyHeader = Seq("Name", "Value")
  private def classPathHeaders = Seq("Resource", "Source")
  private def jvmRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr>
}
Example 87
Source File: StagesTab.scala From spark1.52 with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import org.apache.spark.scheduler.SchedulingMode
import org.apache.spark.ui.{SparkUI, SparkUITab}

private[ui] class StagesTab(parent: SparkUI) extends SparkUITab(parent, "stages") {
  val sc = parent.sc
  val conf = parent.conf
  val killEnabled = parent.killEnabled
  val progressListener = parent.jobProgressListener
  val operationGraphListener = parent.operationGraphListener

  attachPage(new AllStagesPage(this))
  attachPage(new StagePage(this))
  attachPage(new PoolPage(this))

  def isFairScheduler: Boolean = progressListener.schedulingMode.exists(_ == SchedulingMode.FAIR)

  def handleKillRequest(request: HttpServletRequest): Unit = {
    if (killEnabled && parent.securityManager.checkModifyPermissions(request.getRemoteUser)) {
      val killFlag = Option(request.getParameter("terminate")).getOrElse("false").toBoolean
      val stageId = Option(request.getParameter("id")).getOrElse("-1").toInt
      if (stageId >= 0 && killFlag && progressListener.activeStages.contains(stageId)) {
        sc.get.cancelStage(stageId)
      }
      // Do a quick pause here to give Spark time to kill the stage so it shows up as
      // killed after the refresh. Note that this will block the serving thread so the
      // time should be limited in duration.
      Thread.sleep(100)
    }
  }
}
Example 88
Source File: PoolPage.scala From spark1.52 with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.scheduler.StageInfo
import org.apache.spark.ui.{WebUIPage, UIUtils}

private[ui] class PoolPage(parent: StagesTab) extends WebUIPage("pool") {
  private val sc = parent.sc
  private val listener = parent.progressListener

  def render(request: HttpServletRequest): Seq[Node] = {
    listener.synchronized {
      val poolName = request.getParameter("poolname")
      require(poolName != null && poolName.nonEmpty, "Missing poolname parameter")

      val poolToActiveStages = listener.poolToActiveStages
      val activeStages = poolToActiveStages.get(poolName) match {
        case Some(s) => s.values.toSeq
        case None => Seq[StageInfo]()
      }
      val activeStagesTable = new StageTableBase(activeStages.sortBy(_.submissionTime).reverse,
        parent.basePath, parent.progressListener, isFairScheduler = parent.isFairScheduler,
        killEnabled = parent.killEnabled)

      // For now, pool information is only accessible in live UIs
      val pools = sc.map(_.getPoolForName(poolName).get).toSeq
      val poolTable = new PoolTable(pools, parent)

      val content =
        <h4>Summary </h4> ++ poolTable.toNodeSeq ++
        <h4>{activeStages.size} Active Stages</h4> ++ activeStagesTable.toNodeSeq

      UIUtils.headerSparkPage("Fair Scheduler Pool: " + poolName, content, parent)
    }
  }
}
Example 89
Source File: StagePageSuite.scala From spark1.52 with Apache License 2.0 | 5 votes |
package org.apache.spark.ui

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.mockito.Mockito.{mock, when, RETURNS_SMART_NULLS}

import org.apache.spark._
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler._
import org.apache.spark.ui.jobs.{JobProgressListener, StagePage, StagesTab}
import org.apache.spark.ui.scope.RDDOperationGraphListener

class StagePageSuite extends SparkFunSuite with LocalSparkContext {

  // The peak execution memory value is only displayed when unsafe is enabled
  test("peak execution memory only displayed if unsafe is enabled") {
    val unsafeConf = "spark.sql.unsafe.enabled"
    val conf = new SparkConf(false).set(unsafeConf, "true")
    val html = renderStagePage(conf).toString().toLowerCase
    val targetString = "peak execution memory"
    assert(html.contains(targetString))
    // Disable unsafe and make sure it's not there
    val conf2 = new SparkConf(false).set(unsafeConf, "false")
    val html2 = renderStagePage(conf2).toString().toLowerCase
    assert(!html2.contains(targetString))
    // Avoid setting anything; it should be displayed by default
    val conf3 = new SparkConf(false)
    val html3 = renderStagePage(conf3).toString().toLowerCase
    assert(html3.contains(targetString))
  }

  test("SPARK-10543: peak execution memory should be per-task rather than cumulative") {
    val unsafeConf = "spark.sql.unsafe.enabled"
    val conf = new SparkConf(false).set(unsafeConf, "true")
    val html = renderStagePage(conf).toString().toLowerCase
    // verify min/25/50/75/max show the per-task value, not cumulative values
    assert(html.contains("<td>10.0 b</td>" * 5))
  }

  private def renderStagePage(conf: SparkConf): Seq[Node] = {
    val jobListener = new JobProgressListener(conf)
    val graphListener = new RDDOperationGraphListener(conf)
    val tab = mock(classOf[StagesTab], RETURNS_SMART_NULLS)
    val request = mock(classOf[HttpServletRequest])
    when(tab.conf).thenReturn(conf)
    when(tab.progressListener).thenReturn(jobListener)
    when(tab.operationGraphListener).thenReturn(graphListener)
    when(tab.appName).thenReturn("testing")
    when(tab.headerTabs).thenReturn(Seq.empty)
    when(request.getParameter("id")).thenReturn("0")
    when(request.getParameter("attempt")).thenReturn("0")
    val page = new StagePage(tab)

    // Simulate a stage in the job progress listener
    val stageInfo = new StageInfo(0, 0, "dummy", 1, Seq.empty, Seq.empty, "details")
    // Simulate two tasks to test PEAK_EXECUTION_MEMORY correctness
    (1 to 2).foreach { taskId =>
      val taskInfo = new TaskInfo(taskId, taskId, 0, 0, "0", "localhost", TaskLocality.ANY, false)
      val peakExecutionMemory = 10
      taskInfo.accumulables += new AccumulableInfo(0, InternalAccumulator.PEAK_EXECUTION_MEMORY,
        Some(peakExecutionMemory.toString), (peakExecutionMemory * taskId).toString, true)
      jobListener.onStageSubmitted(SparkListenerStageSubmitted(stageInfo))
      jobListener.onTaskStart(SparkListenerTaskStart(0, 0, taskInfo))
      taskInfo.markSuccessful()
      jobListener.onTaskEnd(
        SparkListenerTaskEnd(0, 0, "result", Success, taskInfo, TaskMetrics.empty))
    }
    jobListener.onStageCompleted(SparkListenerStageCompleted(stageInfo))
    page.render(request)
  }
}
Example 90
Source File: YarnProxyRedirectFilter.scala From Spark-2.3.1 with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.yarn

import javax.servlet._
import javax.servlet.http.{HttpServletRequest, HttpServletResponse}

import org.apache.spark.internal.Logging

class YarnProxyRedirectFilter extends Filter with Logging {

  import YarnProxyRedirectFilter._

  override def destroy(): Unit = { }

  override def init(config: FilterConfig): Unit = { }

  override def doFilter(req: ServletRequest, res: ServletResponse, chain: FilterChain): Unit = {
    val hreq = req.asInstanceOf[HttpServletRequest]

    // The YARN proxy will send a request with the "proxy-user" cookie set to the YARN's client
    // user name. We don't expect any other clients to set this cookie, since the SHS does not
    // use cookies for anything.
    Option(hreq.getCookies()).flatMap(_.find(_.getName() == COOKIE_NAME)) match {
      case Some(_) =>
        doRedirect(hreq, res.asInstanceOf[HttpServletResponse])
      case _ =>
        chain.doFilter(req, res)
    }
  }

  private def doRedirect(req: HttpServletRequest, res: HttpServletResponse): Unit = {
    val redirect = req.getRequestURL().toString()

    // Need a client-side redirect instead of an HTTP one, otherwise the YARN proxy itself
    // will handle the redirect and get into an infinite loop.
    val content = s"""
      |<html xmlns="http://www.w3.org/1999/xhtml">
      |<head>
      |  <title>Spark History Server Redirect</title>
      |  <meta http-equiv="refresh" content="0;URL='$redirect'" />
      |</head>
      |<body>
      |  <p>The requested page can be found at: <a href="$redirect">$redirect</a>.</p>
      |</body>
      |</html>
      """.stripMargin

    logDebug(s"Redirecting YARN proxy request to $redirect.")
    res.setStatus(HttpServletResponse.SC_OK)
    res.setContentType("text/html")
    res.getWriter().write(content)
  }
}

private[spark] object YarnProxyRedirectFilter {
  val COOKIE_NAME = "proxy-user"
}
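The filter's behaviour splits on a single condition, the presence of the "proxy-user" cookie, so it can be checked with plain Mockito mocks. A sketch under those assumptions (the host name and user are invented; Spark's own test suite may differ):

import java.io.{PrintWriter, StringWriter}
import javax.servlet.FilterChain
import javax.servlet.http.{Cookie, HttpServletRequest, HttpServletResponse}
import org.mockito.Mockito.{mock, never, verify, when}

val req = mock(classOf[HttpServletRequest])
val res = mock(classOf[HttpServletResponse])
val chain = mock(classOf[FilterChain])
val out = new StringWriter()

when(req.getCookies()).thenReturn(Array(new Cookie("proxy-user", "alice")))
when(req.getRequestURL()).thenReturn(new StringBuffer("http://shs:18080/history/app-1"))
when(res.getWriter()).thenReturn(new PrintWriter(out))

new YarnProxyRedirectFilter().doFilter(req, res, chain)
verify(chain, never()).doFilter(req, res) // redirected instead of forwarded down the chain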
Example 91
Source File: MetricsServlet.scala From Spark-2.3.1 with Apache License 2.0 | 5 votes |
package org.apache.spark.metrics.sink

import java.util.Properties
import java.util.concurrent.TimeUnit
import javax.servlet.http.HttpServletRequest

import com.codahale.metrics.MetricRegistry
import com.codahale.metrics.json.MetricsModule
import com.fasterxml.jackson.databind.ObjectMapper
import org.eclipse.jetty.servlet.ServletContextHandler

import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.ui.JettyUtils._

private[spark] class MetricsServlet(
    val property: Properties,
    val registry: MetricRegistry,
    securityMgr: SecurityManager) extends Sink {

  val SERVLET_KEY_PATH = "path"
  val SERVLET_KEY_SAMPLE = "sample"

  val SERVLET_DEFAULT_SAMPLE = false

  val servletPath = property.getProperty(SERVLET_KEY_PATH)

  val servletShowSample = Option(property.getProperty(SERVLET_KEY_SAMPLE)).map(_.toBoolean)
    .getOrElse(SERVLET_DEFAULT_SAMPLE)

  val mapper = new ObjectMapper().registerModule(
    new MetricsModule(TimeUnit.SECONDS, TimeUnit.MILLISECONDS, servletShowSample))

  def getHandlers(conf: SparkConf): Array[ServletContextHandler] = {
    Array[ServletContextHandler](
      createServletHandler(servletPath,
        new ServletParams(request => getMetricsSnapshot(request), "text/json"), securityMgr, conf)
    )
  }

  def getMetricsSnapshot(request: HttpServletRequest): String = {
    mapper.writeValueAsString(registry)
  }

  override def start() { }

  override def stop() { }

  override def report() { }
}
Example 92
Source File: WorkerWebUI.scala From Spark-2.3.1 with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.worker.ui

import java.io.File
import javax.servlet.http.HttpServletRequest

import org.apache.spark.deploy.worker.Worker
import org.apache.spark.internal.Logging
import org.apache.spark.ui.{SparkUI, WebUI}
import org.apache.spark.ui.JettyUtils._
import org.apache.spark.util.RpcUtils

  def initialize() {
    val logPage = new LogPage(this)
    attachPage(logPage)
    attachPage(new WorkerPage(this))
    attachHandler(createStaticHandler(WorkerWebUI.STATIC_RESOURCE_BASE, "/static"))
    attachHandler(createServletHandler("/log",
      (request: HttpServletRequest) => logPage.renderLog(request),
      worker.securityMgr, worker.conf))
  }
}

private[worker] object WorkerWebUI {
  val STATIC_RESOURCE_BASE = SparkUI.STATIC_RESOURCE_DIR
  val DEFAULT_RETAINED_DRIVERS = 1000
  val DEFAULT_RETAINED_EXECUTORS = 1000
}
Example 93
Source File: HistoryPage.scala From Spark-2.3.1 with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.history

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.status.api.v1.ApplicationInfo
import org.apache.spark.ui.{UIUtils, WebUIPage}

private[history] class HistoryPage(parent: HistoryServer) extends WebUIPage("") {

  def render(request: HttpServletRequest): Seq[Node] = {
    // stripXSS is called first to remove suspicious characters used in XSS attacks
    val requestedIncomplete =
      Option(UIUtils.stripXSS(request.getParameter("showIncomplete"))).getOrElse("false").toBoolean

    val allAppsSize = parent.getApplicationList()
      .count(isApplicationCompleted(_) != requestedIncomplete)
    val eventLogsUnderProcessCount = parent.getEventLogsUnderProcess()
    val lastUpdatedTime = parent.getLastUpdatedTime()
    val providerConfig = parent.getProviderConfig()
    val content =
      <script src={UIUtils.prependBaseUri("/static/historypage-common.js")}></script> ++
      <script src={UIUtils.prependBaseUri("/static/utils.js")}></script>
      <div>
        <div class="container-fluid">
          <ul class="unstyled">
            {providerConfig.map { case (k, v) => <li><strong>{k}:</strong> {v}</li> }}
          </ul>
          {
            if (eventLogsUnderProcessCount > 0) {
              <p>There are {eventLogsUnderProcessCount} event log(s) currently being
                processed which may result in additional applications getting listed on this page.
                Refresh the page to view updates. </p>
            }
          }
          {
            if (lastUpdatedTime > 0) {
              <p>Last updated: <span id="last-updated">{lastUpdatedTime}</span></p>
            }
          }
          {
            <p>Client local time zone: <span id="time-zone"></span></p>
          }
          {
            if (allAppsSize > 0) {
              <script src={UIUtils.prependBaseUri("/static/dataTables.rowsGroup.js")}></script> ++
              <div id="history-summary" class="row-fluid"></div> ++
              <script src={UIUtils.prependBaseUri("/static/historypage.js")}></script> ++
              <script>setAppLimit({parent.maxApplications})</script>
            } else if (requestedIncomplete) {
              <h4>No incomplete applications found!</h4>
            } else if (eventLogsUnderProcessCount > 0) {
              <h4>No completed applications found!</h4>
            } else {
              <h4>No completed applications found!</h4> ++ parent.emptyListingHtml
            }
          }
          <a href={makePageLink(!requestedIncomplete)}>
            {
              if (requestedIncomplete) {
                "Back to completed applications"
              } else {
                "Show incomplete applications"
              }
            }
          </a>
        </div>
      </div>
    UIUtils.basicSparkPage(content, "History Server", true)
  }

  private def makePageLink(showIncomplete: Boolean): String = {
    UIUtils.prependBaseUri("/?" + "showIncomplete=" + showIncomplete)
  }

  private def isApplicationCompleted(appInfo: ApplicationInfo): Boolean = {
    appInfo.attempts.nonEmpty && appInfo.attempts.head.completed
  }
}
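UIUtils.stripXSS, applied above before the parameter is interpreted, removes characters commonly abused in XSS payloads. A rough illustrative equivalent (an assumption about its behaviour, not Spark's exact implementation):

def stripXSSLike(param: String): String =
  if (param == null) null else param.replaceAll("[<>\"'%;()&+]", "")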
Example 94
Source File: ExecutorsTab.scala From Spark-2.3.1 with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.exec

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{SparkUI, SparkUITab, UIUtils, WebUIPage}

private[ui] class ExecutorsTab(parent: SparkUI) extends SparkUITab(parent, "executors") {

  init()

  private def init(): Unit = {
    val threadDumpEnabled =
      parent.sc.isDefined && parent.conf.getBoolean("spark.ui.threadDumpsEnabled", true)

    attachPage(new ExecutorsPage(this, threadDumpEnabled))
    if (threadDumpEnabled) {
      attachPage(new ExecutorThreadDumpPage(this, parent.sc))
    }
  }
}

private[ui] class ExecutorsPage(
    parent: SparkUITab,
    threadDumpEnabled: Boolean)
  extends WebUIPage("") {

  def render(request: HttpServletRequest): Seq[Node] = {
    val content =
      <div>
        {
          <div id="active-executors" class="row-fluid"></div> ++
          <script src={UIUtils.prependBaseUri("/static/utils.js")}></script> ++
          <script src={UIUtils.prependBaseUri("/static/executorspage.js")}></script> ++
          <script>setThreadDumpEnabled({threadDumpEnabled})</script>
        }
      </div>

    UIUtils.headerSparkPage("Executors", content, parent, useDataTables = true)
  }
}
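The thread-dump page is attached only when the flag read in init() is set, so it can be disabled per application. A one-line sketch using the configuration key from the code above:

import org.apache.spark.SparkConf

val conf = new SparkConf().set("spark.ui.threadDumpsEnabled", "false") // default is true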
Example 95
Source File: ExecutorThreadDumpPage.scala From Spark-2.3.1 with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.exec

import java.util.Locale
import javax.servlet.http.HttpServletRequest

import scala.xml.{Node, Text}

import org.apache.spark.SparkContext
import org.apache.spark.ui.{SparkUITab, UIUtils, WebUIPage}

private[ui] class ExecutorThreadDumpPage(
    parent: SparkUITab,
    sc: Option[SparkContext]) extends WebUIPage("threadDump") {

  // stripXSS is called first to remove suspicious characters used in XSS attacks
  def render(request: HttpServletRequest): Seq[Node] = {
    val executorId =
      Option(UIUtils.stripXSS(request.getParameter("executorId"))).map { executorId =>
        UIUtils.decodeURLParameter(executorId)
      }.getOrElse {
        throw new IllegalArgumentException(s"Missing executorId parameter")
      }
    val time = System.currentTimeMillis()
    val maybeThreadDump = sc.get.getExecutorThreadDump(executorId)

    val content = maybeThreadDump.map { threadDump =>
      val dumpRows = threadDump.sortWith {
        case (threadTrace1, threadTrace2) =>
          val v1 = if (threadTrace1.threadName.contains("Executor task launch")) 1 else 0
          val v2 = if (threadTrace2.threadName.contains("Executor task launch")) 1 else 0
          if (v1 == v2) {
            threadTrace1.threadName.toLowerCase(Locale.ROOT) <
              threadTrace2.threadName.toLowerCase(Locale.ROOT)
          } else {
            v1 > v2
          }
      }.map { thread =>
        val threadId = thread.threadId
        val blockedBy = thread.blockedByThreadId match {
          case Some(_) =>
            <div>
              Blocked by <a href={s"#${thread.blockedByThreadId}_td_id"}>
              Thread {thread.blockedByThreadId} {thread.blockedByLock}</a>
            </div>
          case None => Text("")
        }
        val heldLocks = thread.holdingLocks.mkString(", ")

        <tr id={s"thread_${threadId}_tr"} class="accordion-heading"
            onclick={s"toggleThreadStackTrace($threadId, false)"}
            onmouseover={s"onMouseOverAndOut($threadId)"}
            onmouseout={s"onMouseOverAndOut($threadId)"}>
          <td id={s"${threadId}_td_id"}>{threadId}</td>
          <td id={s"${threadId}_td_name"}>{thread.threadName}</td>
          <td id={s"${threadId}_td_state"}>{thread.threadState}</td>
          <td id={s"${threadId}_td_locking"}>{blockedBy}{heldLocks}</td>
          <td id={s"${threadId}_td_stacktrace"} class="hidden">{thread.stackTrace}</td>
        </tr>
      }

      <div class="row-fluid">
        <p>Updated at {UIUtils.formatDate(time)}</p>
        {
          // scalastyle:off
          <p><a class="expandbutton" onClick="expandAllThreadStackTrace(true)">
            Expand All
          </a></p>
          <p><a class="expandbutton hidden" onClick="collapseAllThreadStackTrace(true)">
            Collapse All
          </a></p>
          <div class="form-inline">
            <div class="bs-example" data-example-id="simple-form-inline">
              <div class="form-group">
                <div class="input-group">
                  Search: <input type="text" class="form-control" id="search"
                                 oninput="onSearchStringChange()"></input>
                </div>
              </div>
            </div>
          </div>
          <p></p>
          // scalastyle:on
        }
        <table class={UIUtils.TABLE_CLASS_STRIPED + " accordion-group" + " sortable"}>
          <thead>
            <th onClick="collapseAllThreadStackTrace(false)">Thread ID</th>
            <th onClick="collapseAllThreadStackTrace(false)">Thread Name</th>
            <th onClick="collapseAllThreadStackTrace(false)">Thread State</th>
            <th onClick="collapseAllThreadStackTrace(false)">Thread Locks</th>
          </thead>
          <tbody>{dumpRows}</tbody>
        </table>
      </div>
    }.getOrElse(Text("Error fetching thread dump"))
    UIUtils.headerSparkPage(s"Thread dump for executor $executorId", content, parent)
  }
}
Example 96
Source File: EnvironmentPage.scala From Spark-2.3.1 with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.env

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.SparkConf
import org.apache.spark.status.AppStatusStore
import org.apache.spark.ui._
import org.apache.spark.util.Utils

private[ui] class EnvironmentPage(
    parent: EnvironmentTab,
    conf: SparkConf,
    store: AppStatusStore) extends WebUIPage("") {

  def render(request: HttpServletRequest): Seq[Node] = {
    val appEnv = store.environmentInfo()
    val jvmInformation = Map(
      "Java Version" -> appEnv.runtime.javaVersion,
      "Java Home" -> appEnv.runtime.javaHome,
      "Scala Version" -> appEnv.runtime.scalaVersion)

    val runtimeInformationTable = UIUtils.listingTable(
      propertyHeader, jvmRow, jvmInformation, fixedWidth = true)
    val sparkPropertiesTable = UIUtils.listingTable(propertyHeader, propertyRow,
      Utils.redact(conf, appEnv.sparkProperties.toSeq), fixedWidth = true)
    val systemPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, appEnv.systemProperties, fixedWidth = true)
    val classpathEntriesTable = UIUtils.listingTable(
      classPathHeaders, classPathRow, appEnv.classpathEntries, fixedWidth = true)
    val content =
      <span>
        <h4>Runtime Information</h4> {runtimeInformationTable}
        <h4>Spark Properties</h4> {sparkPropertiesTable}
        <h4>System Properties</h4> {systemPropertiesTable}
        <h4>Classpath Entries</h4> {classpathEntriesTable}
      </span>

    UIUtils.headerSparkPage("Environment", content, parent)
  }

  private def propertyHeader = Seq("Name", "Value")
  private def classPathHeaders = Seq("Resource", "Source")
  private def jvmRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr>
}

private[ui] class EnvironmentTab(
    parent: SparkUI,
    store: AppStatusStore) extends SparkUITab(parent, "environment") {
  attachPage(new EnvironmentPage(this, parent.conf, store))
}
Example 97
Source File: StagesTab.scala From Spark-2.3.1 with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import org.apache.spark.scheduler.SchedulingMode
import org.apache.spark.status.AppStatusStore
import org.apache.spark.status.api.v1.StageStatus
import org.apache.spark.ui.{SparkUI, SparkUITab, UIUtils}

private[ui] class StagesTab(val parent: SparkUI, val store: AppStatusStore)
  extends SparkUITab(parent, "stages") {

  val sc = parent.sc
  val conf = parent.conf
  val killEnabled = parent.killEnabled

  attachPage(new AllStagesPage(this))
  attachPage(new StagePage(this, store))
  attachPage(new PoolPage(this))

  def isFairScheduler: Boolean = {
    store
      .environmentInfo()
      .sparkProperties
      .contains(("spark.scheduler.mode", SchedulingMode.FAIR.toString))
  }

  def handleKillRequest(request: HttpServletRequest): Unit = {
    if (killEnabled && parent.securityManager.checkModifyPermissions(request.getRemoteUser)) {
      // stripXSS is called first to remove suspicious characters used in XSS attacks
      val stageId = Option(UIUtils.stripXSS(request.getParameter("id"))).map(_.toInt)
      stageId.foreach { id =>
        store.asOption(store.lastStageAttempt(id)).foreach { stage =>
          val status = stage.status
          if (status == StageStatus.ACTIVE || status == StageStatus.PENDING) {
            sc.foreach(_.cancelStage(id, "killed via the Web UI"))
            // Do a quick pause here to give Spark time to kill the stage so it shows up as
            // killed after the refresh. Note that this will block the serving thread so the
            // time should be limited in duration.
            Thread.sleep(100)
          }
        }
      }
    }
  }
}
Example 98
Source File: JobsTab.scala From Spark-2.3.1 with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import scala.collection.JavaConverters._

import org.apache.spark.JobExecutionStatus
import org.apache.spark.scheduler.SchedulingMode
import org.apache.spark.status.AppStatusStore
import org.apache.spark.ui._

private[ui] class JobsTab(parent: SparkUI, store: AppStatusStore)
  extends SparkUITab(parent, "jobs") {

  val sc = parent.sc
  val killEnabled = parent.killEnabled

  def isFairScheduler: Boolean = {
    store
      .environmentInfo()
      .sparkProperties
      .contains(("spark.scheduler.mode", SchedulingMode.FAIR.toString))
  }

  def getSparkUser: String = parent.getSparkUser

  attachPage(new AllJobsPage(this, store))
  attachPage(new JobPage(this, store))

  def handleKillRequest(request: HttpServletRequest): Unit = {
    if (killEnabled && parent.securityManager.checkModifyPermissions(request.getRemoteUser)) {
      // stripXSS is called first to remove suspicious characters used in XSS attacks
      val jobId = Option(UIUtils.stripXSS(request.getParameter("id"))).map(_.toInt)
      jobId.foreach { id =>
        store.asOption(store.job(id)).foreach { job =>
          if (job.status == JobExecutionStatus.RUNNING) {
            sc.foreach(_.cancelJob(id))
            // Do a quick pause here to give Spark time to kill the job so it shows up as
            // killed after the refresh. Note that this will block the serving thread so the
            // time should be limited in duration.
            Thread.sleep(100)
          }
        }
      }
    }
  }
}
Example 99
Source File: PoolPage.scala From Spark-2.3.1 with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.status.PoolData
import org.apache.spark.status.api.v1._
import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class PoolPage(parent: StagesTab) extends WebUIPage("pool") {

  def render(request: HttpServletRequest): Seq[Node] = {
    // stripXSS is called first to remove suspicious characters used in XSS attacks
    val poolName = Option(UIUtils.stripXSS(request.getParameter("poolname"))).map { poolname =>
      UIUtils.decodeURLParameter(poolname)
    }.getOrElse {
      throw new IllegalArgumentException(s"Missing poolname parameter")
    }

    // For now, pool information is only accessible in live UIs
    val pool = parent.sc.flatMap(_.getPoolForName(poolName)).getOrElse {
      throw new IllegalArgumentException(s"Unknown pool: $poolName")
    }
    val uiPool = parent.store.asOption(parent.store.pool(poolName)).getOrElse(
      new PoolData(poolName, Set()))
    val activeStages = uiPool.stageIds.toSeq.map(parent.store.lastStageAttempt(_))
    val activeStagesTable = new StageTableBase(parent.store, request, activeStages, "",
      "activeStage", parent.basePath, "stages/pool", parent.isFairScheduler,
      parent.killEnabled, false)

    val poolTable = new PoolTable(Map(pool -> uiPool), parent)
    var content = <h4>Summary </h4> ++ poolTable.toNodeSeq
    if (activeStages.nonEmpty) {
      content ++= <h4>Active Stages ({activeStages.size})</h4> ++ activeStagesTable.toNodeSeq
    }

    UIUtils.headerSparkPage("Fair Scheduler Pool: " + poolName, content, parent)
  }
}
Example 100
Source File: MetricsServlet.scala From BigDatalog with Apache License 2.0 | 5 votes |
package org.apache.spark.metrics.sink

import java.util.Properties
import java.util.concurrent.TimeUnit
import javax.servlet.http.HttpServletRequest

import com.codahale.metrics.MetricRegistry
import com.codahale.metrics.json.MetricsModule
import com.fasterxml.jackson.databind.ObjectMapper
import org.eclipse.jetty.servlet.ServletContextHandler

import org.apache.spark.{SparkConf, SecurityManager}
import org.apache.spark.ui.JettyUtils._

private[spark] class MetricsServlet(
    val property: Properties,
    val registry: MetricRegistry,
    securityMgr: SecurityManager) extends Sink {

  val SERVLET_KEY_PATH = "path"
  val SERVLET_KEY_SAMPLE = "sample"

  val SERVLET_DEFAULT_SAMPLE = false

  val servletPath = property.getProperty(SERVLET_KEY_PATH)

  val servletShowSample = Option(property.getProperty(SERVLET_KEY_SAMPLE)).map(_.toBoolean)
    .getOrElse(SERVLET_DEFAULT_SAMPLE)

  val mapper = new ObjectMapper().registerModule(
    new MetricsModule(TimeUnit.SECONDS, TimeUnit.MILLISECONDS, servletShowSample))

  def getHandlers(conf: SparkConf): Array[ServletContextHandler] = {
    Array[ServletContextHandler](
      createServletHandler(servletPath,
        new ServletParams(request => getMetricsSnapshot(request), "text/json"), securityMgr, conf)
    )
  }

  def getMetricsSnapshot(request: HttpServletRequest): String = {
    mapper.writeValueAsString(registry)
  }

  override def start() { }

  override def stop() { }

  override def report() { }
}
Example 101
Source File: MesosClusterPage.scala From BigDatalog with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.mesos.ui

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.mesos.Protos.TaskStatus

import org.apache.spark.deploy.mesos.MesosDriverDescription
import org.apache.spark.scheduler.cluster.mesos.MesosClusterSubmissionState
import org.apache.spark.ui.{UIUtils, WebUIPage}

private[mesos] class MesosClusterPage(parent: MesosClusterUI) extends WebUIPage("") {
  def render(request: HttpServletRequest): Seq[Node] = {
    val state = parent.scheduler.getSchedulerState()
    val queuedHeaders = Seq("Driver ID", "Submit Date", "Main Class", "Driver Resources")
    val driverHeaders = queuedHeaders ++
      Seq("Start Date", "Mesos Slave ID", "State")
    val retryHeaders = Seq("Driver ID", "Submit Date", "Description") ++
      Seq("Last Failed Status", "Next Retry Time", "Attempt Count")
    val queuedTable = UIUtils.listingTable(queuedHeaders, queuedRow, state.queuedDrivers)
    val launchedTable = UIUtils.listingTable(driverHeaders, driverRow, state.launchedDrivers)
    val finishedTable = UIUtils.listingTable(driverHeaders, driverRow, state.finishedDrivers)
    val retryTable = UIUtils.listingTable(retryHeaders, retryRow, state.pendingRetryDrivers)
    val content =
      <p>Mesos Framework ID: {state.frameworkId}</p>
      <div class="row-fluid">
        <div class="span12">
          <h4>Queued Drivers:</h4>
          {queuedTable}
          <h4>Launched Drivers:</h4>
          {launchedTable}
          <h4>Finished Drivers:</h4>
          {finishedTable}
          <h4>Supervise drivers waiting for retry:</h4>
          {retryTable}
        </div>
      </div>;
    UIUtils.basicSparkPage(content, "Spark Drivers for Mesos cluster")
  }

  private def queuedRow(submission: MesosDriverDescription): Seq[Node] = {
    val id = submission.submissionId
    <tr>
      <td><a href={s"driver?id=$id"}>{id}</a></td>
      <td>{submission.submissionDate}</td>
      <td>{submission.command.mainClass}</td>
      <td>cpus: {submission.cores}, mem: {submission.mem}</td>
    </tr>
  }

  private def driverRow(state: MesosClusterSubmissionState): Seq[Node] = {
    val id = state.driverDescription.submissionId
    <tr>
      <td><a href={s"driver?id=$id"}>{id}</a></td>
      <td>{state.driverDescription.submissionDate}</td>
      <td>{state.driverDescription.command.mainClass}</td>
      <td>cpus: {state.driverDescription.cores}, mem: {state.driverDescription.mem}</td>
      <td>{state.startDate}</td>
      <td>{state.slaveId.getValue}</td>
      <td>{stateString(state.mesosTaskStatus)}</td>
    </tr>
  }

  private def retryRow(submission: MesosDriverDescription): Seq[Node] = {
    val id = submission.submissionId
    <tr>
      <td><a href={s"driver?id=$id"}>{id}</a></td>
      <td>{submission.submissionDate}</td>
      <td>{submission.command.mainClass}</td>
      <td>{submission.retryState.get.lastFailureStatus}</td>
      <td>{submission.retryState.get.nextRetry}</td>
      <td>{submission.retryState.get.retries}</td>
    </tr>
  }

  private def stateString(status: Option[TaskStatus]): String = {
    if (status.isEmpty) {
      return ""
    }
    val sb = new StringBuilder
    val s = status.get
    sb.append(s"State: ${s.getState}")
    if (status.get.hasMessage) {
      sb.append(s", Message: ${s.getMessage}")
    }
    if (status.get.hasHealthy) {
      sb.append(s", Healthy: ${s.getHealthy}")
    }
    if (status.get.hasSource) {
      sb.append(s", Source: ${s.getSource}")
    }
    if (status.get.hasReason) {
      sb.append(s", Reason: ${s.getReason}")
    }
    if (status.get.hasTimestamp) {
      sb.append(s", Time: ${s.getTimestamp}")
    }
    sb.toString()
  }
}
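The stateString helper above uses an early return and repeated status.get calls. An equivalent, more idiomatic sketch built only from the accessors the example already uses:

import org.apache.mesos.Protos.TaskStatus

def stateString(status: Option[TaskStatus]): String =
  status.map { s =>
    val extras = Seq(
      if (s.hasMessage) Some(s"Message: ${s.getMessage}") else None,
      if (s.hasHealthy) Some(s"Healthy: ${s.getHealthy}") else None,
      if (s.hasSource) Some(s"Source: ${s.getSource}") else None,
      if (s.hasReason) Some(s"Reason: ${s.getReason}") else None,
      if (s.hasTimestamp) Some(s"Time: ${s.getTimestamp}") else None
    ).flatten
    (s"State: ${s.getState}" +: extras).mkString(", ")
  }.getOrElse("")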
Example 102
Source File: HistoryNotFoundPage.scala From BigDatalog with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.master.ui

import java.net.URLDecoder
import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class HistoryNotFoundPage(parent: MasterWebUI)
  extends WebUIPage("history/not-found") {

  def render(request: HttpServletRequest): Seq[Node] = {
    val titleParam = request.getParameter("title")
    val msgParam = request.getParameter("msg")
    val exceptionParam = request.getParameter("exception")

    // If no parameters are specified, assume the user did not enable event logging
    val defaultTitle = "Event logging is not enabled"
    val defaultContent =
      <div class="row-fluid">
        <div class="span12" style="font-size:14px">
          No event logs were found for this application! To
          <a href="http://spark.apache.org/docs/latest/monitoring.html">enable event logging</a>,
          set <span style="font-style:italic">spark.eventLog.enabled</span> to true and
          <span style="font-style:italic">spark.eventLog.dir</span> to the directory to which
          your event logs are written.
        </div>
      </div>

    val title = Option(titleParam).getOrElse(defaultTitle)
    val content = Option(msgParam)
      .map { msg => URLDecoder.decode(msg, "UTF-8") }
      .map { msg =>
        <div class="row-fluid">
          <div class="span12" style="font-size:14px">{msg}</div>
        </div> ++
        Option(exceptionParam)
          .map { e => URLDecoder.decode(e, "UTF-8") }
          .map { e => <pre>{e}</pre> }
          .getOrElse(Seq.empty)
      }.getOrElse(defaultContent)

    UIUtils.basicSparkPage(content, title)
  }
}
Example 103
Source File: ApplicationPage.scala From BigDatalog with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.master.ui

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.deploy.ExecutorState
import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
import org.apache.spark.deploy.master.ExecutorDesc
import org.apache.spark.ui.{UIUtils, WebUIPage}
import org.apache.spark.util.Utils

private[ui] class ApplicationPage(parent: MasterWebUI) extends WebUIPage("app") {

  private val master = parent.masterEndpointRef

  def render(request: HttpServletRequest): Seq[Node] = {
    val appId = request.getParameter("appId")
    val state = master.askWithRetry[MasterStateResponse](RequestMasterState)
    val app = state.activeApps.find(_.id == appId).getOrElse({
      state.completedApps.find(_.id == appId).getOrElse(null)
    })
    if (app == null) {
      val msg = <div class="row-fluid">No running application with ID {appId}</div>
      return UIUtils.basicSparkPage(msg, "Not Found")
    }

    val executorHeaders = Seq("ExecutorID", "Worker", "Cores", "Memory", "State", "Logs")
    val allExecutors = (app.executors.values ++ app.removedExecutors).toSet.toSeq
    // This includes executors that are either still running or have exited cleanly
    val executors = allExecutors.filter { exec =>
      !ExecutorState.isFinished(exec.state) || exec.state == ExecutorState.EXITED
    }
    val removedExecutors = allExecutors.diff(executors)
    val executorsTable = UIUtils.listingTable(executorHeaders, executorRow, executors)
    val removedExecutorsTable =
      UIUtils.listingTable(executorHeaders, executorRow, removedExecutors)

    val content =
      <div class="row-fluid">
        <div class="span12">
          <ul class="unstyled">
            <li><strong>ID:</strong> {app.id}</li>
            <li><strong>Name:</strong> {app.desc.name}</li>
            <li><strong>User:</strong> {app.desc.user}</li>
            <li><strong>Cores:</strong>
            {
              if (app.desc.maxCores.isEmpty) {
                "Unlimited (%s granted)".format(app.coresGranted)
              } else {
                "%s (%s granted, %s left)".format(
                  app.desc.maxCores.get, app.coresGranted, app.coresLeft)
              }
            }
            </li>
            <li>
              <strong>Executor Memory:</strong>
              {Utils.megabytesToString(app.desc.memoryPerExecutorMB)}
            </li>
            <li><strong>Submit Date:</strong> {app.submitDate}</li>
            <li><strong>State:</strong> {app.state}</li>
            <li><strong><a href={app.curAppUIUrl}>Application Detail UI</a></strong></li>
          </ul>
        </div>
      </div>

      <div class="row-fluid"> <!-- Executors -->
        <div class="span12">
          <h4> Executor Summary </h4>
          {executorsTable}
          {
            if (removedExecutors.nonEmpty) {
              <h4> Removed Executors </h4> ++
              removedExecutorsTable
            }
          }
        </div>
      </div>;
    UIUtils.basicSparkPage(content, "Application: " + app.desc.name)
  }

  private def executorRow(executor: ExecutorDesc): Seq[Node] = {
    <tr>
      <td>{executor.id}</td>
      <td>
        <a href={executor.worker.webUiAddress}>{executor.worker.id}</a>
      </td>
      <td>{executor.cores}</td>
      <td>{executor.memory}</td>
      <td>{executor.state}</td>
      <td>
        <a href={"%s/logPage?appId=%s&executorId=%s&logType=stdout"
          .format(executor.worker.webUiAddress, executor.application.id, executor.id)}>stdout</a>
        <a href={"%s/logPage?appId=%s&executorId=%s&logType=stderr"
          .format(executor.worker.webUiAddress, executor.application.id, executor.id)}>stderr</a>
      </td>
    </tr>
  }
}
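The activeApps/completedApps lookup above round-trips through null before the explicit null check. A sketch of the same lookup kept in Option form, using the same state and appId values as in render:

val maybeApp = (state.activeApps ++ state.completedApps).find(_.id == appId)
// When maybeApp.isEmpty, render the "Not Found" page; otherwise proceed as above.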
Example 104
Source File: WorkerWebUI.scala From BigDatalog with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.worker.ui

import java.io.File
import javax.servlet.http.HttpServletRequest

import org.apache.spark.Logging
import org.apache.spark.deploy.worker.Worker
import org.apache.spark.ui.{SparkUI, WebUI}
import org.apache.spark.ui.JettyUtils._
import org.apache.spark.util.RpcUtils

// Note: the WorkerWebUI class declaration is elided in this excerpt; only its
// initialize() method and the companion object are shown.
  def initialize() {
    val logPage = new LogPage(this)
    attachPage(logPage)
    attachPage(new WorkerPage(this))
    attachHandler(createStaticHandler(WorkerWebUI.STATIC_RESOURCE_BASE, "/static"))
    attachHandler(createServletHandler("/log",
      (request: HttpServletRequest) => logPage.renderLog(request),
      worker.securityMgr,
      worker.conf))
  }
}

private[worker] object WorkerWebUI {
  val STATIC_RESOURCE_BASE = SparkUI.STATIC_RESOURCE_DIR
  val DEFAULT_RETAINED_DRIVERS = 1000
  val DEFAULT_RETAINED_EXECUTORS = 1000
}
Example 105
Source File: ExecutorThreadDumpPage.scala From BigDatalog with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.exec

import javax.servlet.http.HttpServletRequest

import scala.util.Try
import scala.xml.{Text, Node}

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class ExecutorThreadDumpPage(parent: ExecutorsTab) extends WebUIPage("threadDump") {

  private val sc = parent.sc

  def render(request: HttpServletRequest): Seq[Node] = {
    val executorId = Option(request.getParameter("executorId")).map { executorId =>
      UIUtils.decodeURLParameter(executorId)
    }.getOrElse {
      throw new IllegalArgumentException(s"Missing executorId parameter")
    }
    val time = System.currentTimeMillis()
    val maybeThreadDump = sc.get.getExecutorThreadDump(executorId)

    val content = maybeThreadDump.map { threadDump =>
      val dumpRows = threadDump.sortWith {
        case (threadTrace1, threadTrace2) => {
          val v1 = if (threadTrace1.threadName.contains("Executor task launch")) 1 else 0
          val v2 = if (threadTrace2.threadName.contains("Executor task launch")) 1 else 0
          if (v1 == v2) {
            threadTrace1.threadName.toLowerCase < threadTrace2.threadName.toLowerCase
          } else {
            v1 > v2
          }
        }
      }.map { thread =>
        val threadName = thread.threadName
        val className = "accordion-heading " + {
          if (threadName.contains("Executor task launch")) {
            "executor-thread"
          } else {
            "non-executor-thread"
          }
        }
        <div class="accordion-group">
          <div class={className} onclick="$(this).next().toggleClass('hidden')">
            <a class="accordion-toggle">
              Thread {thread.threadId}: {threadName} ({thread.threadState})
            </a>
          </div>
          <div class="accordion-body hidden">
            <div class="accordion-inner">
              <pre>{thread.stackTrace}</pre>
            </div>
          </div>
        </div>
      }

      <div class="row-fluid">
        <p>Updated at {UIUtils.formatDate(time)}</p>
        {
          // scalastyle:off
          <p><a class="expandbutton"
                onClick="$('.accordion-body').removeClass('hidden'); $('.expandbutton').toggleClass('hidden')">
            Expand All
          </a></p>
          <p><a class="expandbutton hidden"
                onClick="$('.accordion-body').addClass('hidden'); $('.expandbutton').toggleClass('hidden')">
            Collapse All
          </a></p>
          // scalastyle:on
        }
        <div class="accordion">{dumpRows}</div>
      </div>
    }.getOrElse(Text("Error fetching thread dump"))

    UIUtils.headerSparkPage(s"Thread dump for executor $executorId", content, parent)
  }
}
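The hand-rolled sortWith comparator orders "Executor task launch" threads first, then sorts by lower-cased name. The same two-key ordering can be expressed as a single sortBy over the same threadDump value (a sketch, not part of the original file):

val dumpRowsSorted = threadDump.sortBy { t =>
  (if (t.threadName.contains("Executor task launch")) 0 else 1,
   t.threadName.toLowerCase)
}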
Example 106
Source File: EnvironmentPage.scala From BigDatalog with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.env

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class EnvironmentPage(parent: EnvironmentTab) extends WebUIPage("") {
  private val listener = parent.listener

  def render(request: HttpServletRequest): Seq[Node] = {
    val runtimeInformationTable = UIUtils.listingTable(
      propertyHeader, jvmRow, listener.jvmInformation, fixedWidth = true)
    val sparkPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.sparkProperties, fixedWidth = true)
    val systemPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.systemProperties, fixedWidth = true)
    val classpathEntriesTable = UIUtils.listingTable(
      classPathHeaders, classPathRow, listener.classpathEntries, fixedWidth = true)
    val content =
      <span>
        <h4>Runtime Information</h4> {runtimeInformationTable}
        <h4>Spark Properties</h4> {sparkPropertiesTable}
        <h4>System Properties</h4> {systemPropertiesTable}
        <h4>Classpath Entries</h4> {classpathEntriesTable}
      </span>

    UIUtils.headerSparkPage("Environment", content, parent)
  }

  private def propertyHeader = Seq("Name", "Value")
  private def classPathHeaders = Seq("Resource", "Source")
  private def jvmRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr>
}
Example 107
Source File: StagesTab.scala From BigDatalog with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import org.apache.spark.scheduler.SchedulingMode
import org.apache.spark.ui.{SparkUI, SparkUITab}

private[ui] class StagesTab(parent: SparkUI) extends SparkUITab(parent, "stages") {
  val sc = parent.sc
  val conf = parent.conf
  val killEnabled = parent.killEnabled
  val progressListener = parent.jobProgressListener
  val operationGraphListener = parent.operationGraphListener

  attachPage(new AllStagesPage(this))
  attachPage(new StagePage(this))
  attachPage(new PoolPage(this))

  def isFairScheduler: Boolean = progressListener.schedulingMode.exists(_ == SchedulingMode.FAIR)

  def handleKillRequest(request: HttpServletRequest): Unit = {
    if (killEnabled && parent.securityManager.checkModifyPermissions(request.getRemoteUser)) {
      val killFlag = Option(request.getParameter("terminate")).getOrElse("false").toBoolean
      val stageId = Option(request.getParameter("id")).getOrElse("-1").toInt
      if (stageId >= 0 && killFlag && progressListener.activeStages.contains(stageId)) {
        sc.get.cancelStage(stageId)
      }
      // Do a quick pause here to give Spark time to kill the stage so it shows up as
      // killed after the refresh. Note that this will block the serving thread so the
      // time should be limited in duration.
      Thread.sleep(100)
    }
  }
}
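handleKillRequest above parses both parameters through string defaults. The defaulted-parameter pattern it relies on can be isolated as a small helper (a sketch, not part of the original file):

import javax.servlet.http.HttpServletRequest

def boolParam(request: HttpServletRequest, name: String, default: Boolean = false): Boolean =
  Option(request.getParameter(name)).map(_.toBoolean).getOrElse(default)

// e.g. boolParam(request, "terminate") mirrors the "terminate" handling above.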
Example 108
Source File: PoolPage.scala From BigDatalog with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.scheduler.StageInfo
import org.apache.spark.ui.{WebUIPage, UIUtils}

private[ui] class PoolPage(parent: StagesTab) extends WebUIPage("pool") {
  private val sc = parent.sc
  private val listener = parent.progressListener

  def render(request: HttpServletRequest): Seq[Node] = {
    listener.synchronized {
      val poolName = Option(request.getParameter("poolname")).map { poolname =>
        UIUtils.decodeURLParameter(poolname)
      }.getOrElse {
        throw new IllegalArgumentException(s"Missing poolname parameter")
      }

      val poolToActiveStages = listener.poolToActiveStages
      val activeStages = poolToActiveStages.get(poolName) match {
        case Some(s) => s.values.toSeq
        case None => Seq[StageInfo]()
      }
      val activeStagesTable =
        new StageTableBase(activeStages.sortBy(_.submissionTime).reverse, parent.basePath,
          parent.progressListener, isFairScheduler = parent.isFairScheduler,
          killEnabled = parent.killEnabled)

      // For now, pool information is only accessible in live UIs
      val pools = sc.map(_.getPoolForName(poolName).getOrElse {
        throw new IllegalArgumentException(s"Unknown poolname: $poolName")
      }).toSeq
      val poolTable = new PoolTable(pools, parent)

      val content =
        <h4>Summary </h4> ++ poolTable.toNodeSeq ++
        <h4>{activeStages.size} Active Stages</h4> ++ activeStagesTable.toNodeSeq

      UIUtils.headerSparkPage("Fair Scheduler Pool: " + poolName, content, parent)
    }
  }
}
Example 109
Source File: StagePageSuite.scala From BigDatalog with Apache License 2.0 | 5 votes |
package org.apache.spark.ui

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.mockito.Mockito.{mock, when, RETURNS_SMART_NULLS}

import org.apache.spark._
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler._
import org.apache.spark.ui.jobs.{JobProgressListener, StagePage, StagesTab}
import org.apache.spark.ui.scope.RDDOperationGraphListener

class StagePageSuite extends SparkFunSuite with LocalSparkContext {

  test("peak execution memory only displayed if unsafe is enabled") {
    val unsafeConf = "spark.sql.unsafe.enabled"
    val conf = new SparkConf(false).set(unsafeConf, "true")
    val html = renderStagePage(conf).toString().toLowerCase
    val targetString = "peak execution memory"
    assert(html.contains(targetString))
    // Disable unsafe and make sure it's not there
    val conf2 = new SparkConf(false).set(unsafeConf, "false")
    val html2 = renderStagePage(conf2).toString().toLowerCase
    assert(!html2.contains(targetString))
    // Avoid setting anything; it should be displayed by default
    val conf3 = new SparkConf(false)
    val html3 = renderStagePage(conf3).toString().toLowerCase
    assert(html3.contains(targetString))
  }

  test("SPARK-10543: peak execution memory should be per-task rather than cumulative") {
    val unsafeConf = "spark.sql.unsafe.enabled"
    val conf = new SparkConf(false).set(unsafeConf, "true")
    val html = renderStagePage(conf).toString().toLowerCase
    // verify min/25/50/75/max show task value not cumulative values
    assert(html.contains("<td>10.0 b</td>" * 5))
  }

  private def renderStagePage(conf: SparkConf): Seq[Node] = {
    val jobListener = new JobProgressListener(conf)
    val graphListener = new RDDOperationGraphListener(conf)
    val tab = mock(classOf[StagesTab], RETURNS_SMART_NULLS)
    val request = mock(classOf[HttpServletRequest])
    when(tab.conf).thenReturn(conf)
    when(tab.progressListener).thenReturn(jobListener)
    when(tab.operationGraphListener).thenReturn(graphListener)
    when(tab.appName).thenReturn("testing")
    when(tab.headerTabs).thenReturn(Seq.empty)
    when(request.getParameter("id")).thenReturn("0")
    when(request.getParameter("attempt")).thenReturn("0")
    val page = new StagePage(tab)

    // Simulate a stage in job progress listener
    val stageInfo = new StageInfo(0, 0, "dummy", 1, Seq.empty, Seq.empty, "details")
    // Simulate two tasks to test PEAK_EXECUTION_MEMORY correctness
    (1 to 2).foreach { taskId =>
      val taskInfo = new TaskInfo(taskId, taskId, 0, 0, "0", "localhost", TaskLocality.ANY, false)
      val peakExecutionMemory = 10
      taskInfo.accumulables += new AccumulableInfo(0, InternalAccumulator.PEAK_EXECUTION_MEMORY,
        Some(peakExecutionMemory.toString), (peakExecutionMemory * taskId).toString, true)
      jobListener.onStageSubmitted(SparkListenerStageSubmitted(stageInfo))
      jobListener.onTaskStart(SparkListenerTaskStart(0, 0, taskInfo))
      taskInfo.markSuccessful()
      jobListener.onTaskEnd(
        SparkListenerTaskEnd(0, 0, "result", Success, taskInfo, TaskMetrics.empty))
    }
    jobListener.onStageCompleted(SparkListenerStageCompleted(stageInfo))
    page.render(request)
  }
}
Example 110
Source File: S3ConfigPage.scala From teamcity-s3-plugin with Apache License 2.0 | 5 votes |
package com.gu.teamcity

import jetbrains.buildServer.controllers.admin.AdminPage
import jetbrains.buildServer.serverSide.auth.Permission
import jetbrains.buildServer.web.openapi.{Groupable, PagePlaces, PluginDescriptor}
import javax.servlet.http.HttpServletRequest
import java.util.Map

class S3ConfigPage(extension: S3ConfigManager, pagePlaces: PagePlaces, descriptor: PluginDescriptor)
  extends AdminPage(pagePlaces, "S3", descriptor.getPluginResourcesPath("input.jsp"), "S3") {

  register()

  override def fillModel(model: Map[String, AnyRef], request: HttpServletRequest) {
    import collection.convert.wrapAll._
    model.putAll(extension.details.mapValues(_.getOrElse("")))
  }

  override def isAvailable(request: HttpServletRequest): Boolean = {
    super.isAvailable(request) &&
      checkHasGlobalPermission(request, Permission.CHANGE_SERVER_SETTINGS)
  }

  def getGroup: String = {
    Groupable.SERVER_RELATED_GROUP
  }
}
Example 111
Source File: S3ConfigController.scala From teamcity-s3-plugin with Apache License 2.0 | 5 votes |
package com.gu.teamcity

import jetbrains.buildServer.controllers.MultipartFormController
import jetbrains.buildServer.web.openapi.WebControllerManager
import org.jetbrains.annotations.NotNull
import org.springframework.web.servlet.ModelAndView
import org.springframework.web.servlet.view.RedirectView
import javax.servlet.http.HttpServletRequest
import javax.servlet.http.HttpServletResponse

class S3ConfigController(config: S3ConfigManager, webControllerManager: WebControllerManager)
  extends MultipartFormController {

  webControllerManager.registerController("/app/s3/**", this)

  protected def doPost(request: HttpServletRequest, response: HttpServletResponse): ModelAndView = {
    def param(name: String) = S3ConfigController.emptyAsNone(request.getParameter(name))

    config.updateAndPersist(S3Config(
      param("artifactBucket"), param("buildManifestBucket"), param("tagManifestBucket"),
      param("accessKey"), param("secretKey")
    ))
    new ModelAndView(new RedirectView("/admin/admin.html?item=S3"))
  }
}

object S3ConfigController {
  def emptyAsNone(s: String): Option[String] = Option(s).filterNot(_.trim.isEmpty)
}
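The emptyAsNone helper normalizes both null and blank form fields into None, so absent and empty inputs are treated alike. A quick usage sketch:

S3ConfigController.emptyAsNone(null)      // None
S3ConfigController.emptyAsNone("   ")     // None
S3ConfigController.emptyAsNone("bucket")  // Some("bucket")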