java.net.URLDecoder Scala Examples
The following examples show how to use java.net.URLDecoder.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
Example 1
Source File: MeshTests.scala From scalismo with Apache License 2.0 | 5 votes |
package scalismo.mesh import java.io.File import java.net.URLDecoder import breeze.linalg.DenseVector import scalismo.ScalismoTestSuite import scalismo.common.{PointId, UnstructuredPointsDomain} import scalismo.geometry.Point.implicits._ import scalismo.geometry.{_3D, Point} import scalismo.io.MeshIO import scalismo.registration.{RotationSpace, ScalingSpace} import scala.language.implicitConversions class MeshTests extends ScalismoTestSuite { implicit def doubleToFloat(d: Double): Float = d.toFloat implicit def intToPointId(i: Int): PointId = PointId(i) describe("a mesh") { val path = getClass.getResource("/facemesh.stl").getPath val facemesh = MeshIO.readMesh(new File(URLDecoder.decode(path, "UTF-8"))).get it("finds the right closest points for all the points that define the mesh") { for ((pt, id) <- facemesh.pointSet.points.zipWithIndex) { val ptWithID = facemesh.pointSet.findClosestPoint(pt) val closestPt = ptWithID.point val closestId = ptWithID.id assert(closestPt === pt) assert(closestId.id === id) } } it("finds the right closest point for a point that is not defined on the mesh") { val pts = IndexedSeq(Point(0.0, 0.0, 0.0), Point(1.0, 1.0, 1.0), Point(1.0, 1.0, 5.0)) val cells = IndexedSeq(TriangleCell(0, 1, 2)) val mesh = TriangleMesh3D(UnstructuredPointsDomain(pts), TriangleList(cells)) val newPt = Point(1.1, 1.1, 4) val ptWithID = mesh.pointSet.findClosestPoint(newPt) val closestPt = ptWithID.point val closestPtId = ptWithID.id assert(closestPtId.id === 2) assert(closestPt === pts(2)) } it("computes its area correctly for a triangle") { val pts: IndexedSeq[Point[_3D]] = IndexedSeq((0.0, 0.0, 0.0), (0.0, 1.0, 0.0), (1.0, 0.0, 0.0)) val cells = IndexedSeq(TriangleCell(0, 1, 2)) val mesh = TriangleMesh3D(UnstructuredPointsDomain(pts), TriangleList(cells)) val R = RotationSpace[_3D]((0.0, 0.0, 0.0)).transformForParameters(DenseVector(0.3, 0.4, 0.1)) val s = ScalingSpace[_3D].transformForParameters(DenseVector(2.0)) val transformedMesh = mesh.transform(R).transform(s) mesh.area should be(0.5 +- 1e-8) transformedMesh.area should be(4.0f * mesh.area +- 1e-5) // scaling by two gives 4 times the area } it("computes the right binary image for the unit sphere") { val path = getClass.getResource("/unit-sphere.stl").getPath val spheremesh = MeshIO.readMesh(new File(URLDecoder.decode(path, "UTF-8"))).get val binaryImg = spheremesh.operations.toBinaryImage binaryImg(Point(0, 0, 0)) should be(1) binaryImg(Point(2, 0, 0)) should be(0) } it("can have an empty cell list") { val pts = IndexedSeq(Point(0.0, 0.0, 0.0), Point(1.0, 1.0, 1.0), Point(1.0, 1.0, 5.0)) val cells = IndexedSeq[TriangleCell]() try { TriangleMesh3D(UnstructuredPointsDomain(pts), TriangleList(cells)) // would throw exception on fail } catch { case e: Exception => fail("It should be possible to create triangleMesh with an empty cell list") } } } }
Example 2
Source File: HistoryNotFoundPage.scala From spark1.52 with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.master.ui import java.net.URLDecoder import javax.servlet.http.HttpServletRequest import scala.xml.Node import org.apache.spark.ui.{UIUtils, WebUIPage} private[ui] class HistoryNotFoundPage(parent: MasterWebUI) extends WebUIPage("history/not-found") { def render(request: HttpServletRequest): Seq[Node] = { val titleParam = request.getParameter("title") val msgParam = request.getParameter("msg") val exceptionParam = request.getParameter("exception") // If no parameters are specified, assume the user did not enable event logging //如果没有指定参数,假设用户未启用事件日志记录 val defaultTitle = "Event logging is not enabled" val defaultContent = <div class="row-fluid"> <div class="span12" style="font-size:14px"> No event logs were found for this application! To <a href="http://spark.apache.org/docs/latest/monitoring.html">enable event logging</a>, set <span style="font-style:italic">spark.eventLog.enabled</span> to true and <span style="font-style:italic">spark.eventLog.dir</span> to the directory to which your event logs are written. </div> </div> val title = Option(titleParam).getOrElse(defaultTitle) val content = Option(msgParam) .map { msg => URLDecoder.decode(msg, "UTF-8") } .map { msg => <div class="row-fluid"> <div class="span12" style="font-size:14px">{msg}</div> </div> ++ Option(exceptionParam) .map { e => URLDecoder.decode(e, "UTF-8") } .map { e => <pre>{e}</pre> } .getOrElse(Seq.empty) }.getOrElse(defaultContent) UIUtils.basicSparkPage(content, title) } }
Example 3
Source File: ExecutorThreadDumpPage.scala From spark1.52 with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.exec import java.net.URLDecoder import javax.servlet.http.HttpServletRequest import scala.util.Try import scala.xml.{Text, Node} import org.apache.spark.ui.{UIUtils, WebUIPage} private[ui] class ExecutorThreadDumpPage(parent: ExecutorsTab) extends WebUIPage("threadDump") { private val sc = parent.sc def render(request: HttpServletRequest): Seq[Node] = { val executorId = Option(request.getParameter("executorId")).map { executorId => // Due to YARN-2844, "<driver>" in the url will be encoded to "%25253Cdriver%25253E" when // running in yarn-cluster mode. `request.getParameter("executorId")` will return // "%253Cdriver%253E". Therefore we need to decode it until we get the real id. var id = executorId var decodedId = URLDecoder.decode(id, "UTF-8") while (id != decodedId) { id = decodedId decodedId = URLDecoder.decode(id, "UTF-8") } id }.getOrElse { throw new IllegalArgumentException(s"Missing executorId parameter") } val time = System.currentTimeMillis() val maybeThreadDump = sc.get.getExecutorThreadDump(executorId) val content = maybeThreadDump.map { threadDump => val dumpRows = threadDump.sortWith { case (threadTrace1, threadTrace2) => { val v1 = if (threadTrace1.threadName.contains("Executor task launch")) 1 else 0 val v2 = if (threadTrace2.threadName.contains("Executor task launch")) 1 else 0 if (v1 == v2) { threadTrace1.threadName.toLowerCase < threadTrace2.threadName.toLowerCase } else { v1 > v2 } } }.map { thread => val threadName = thread.threadName val className = "accordion-heading " + { if (threadName.contains("Executor task launch")) { "executor-thread" } else { "non-executor-thread" } } <div class="accordion-group"> <div class={className} onclick="$(this).next().toggleClass('hidden')"> <a class="accordion-toggle"> Thread {thread.threadId}: {threadName} ({thread.threadState}) </a> </div> <div class="accordion-body hidden"> <div class="accordion-inner"> <pre>{thread.stackTrace}</pre> </div> </div> </div> } <div class="row-fluid"> <p>Updated at {UIUtils.formatDate(time)}</p> { // scalastyle:off <p><a class="expandbutton" onClick="$('.accordion-body').removeClass('hidden'); $('.expandbutton').toggleClass('hidden')"> Expand All </a></p> <p><a class="expandbutton hidden" onClick="$('.accordion-body').addClass('hidden'); $('.expandbutton').toggleClass('hidden')"> Collapse All </a></p> // scalastyle:on } <div class="accordion">{dumpRows}</div> </div> }.getOrElse(Text("Error fetching thread dump")) UIUtils.headerSparkPage(s"Thread dump for executor $executorId", content, parent) } }
Example 4
Source File: SnapshotRemoteImpl.scala From c4proto with Apache License 2.0 | 5 votes |
package ee.cone.c4actor import java.net.{URLDecoder, URLEncoder} import java.nio.file.{Files, Paths} import java.nio.charset.StandardCharsets.UTF_8 import ee.cone.c4di.c4 @c4("ConfigSimpleSignerApp") final class SimpleSignerImpl( config: Config, idGenUtil : IdGenUtil )( fileName: String = config.get("C4AUTH_KEY_FILE") )( val salt: String = new String(Files.readAllBytes(Paths.get(fileName)),UTF_8) ) extends SimpleSigner { def sign(data: List[String], until: Long): String = { val uData = until.toString :: data val hash = idGenUtil.srcIdFromStrings(salt :: uData:_*) (hash :: uData).map(URLEncoder.encode(_,"UTF-8")).mkString("=") } def retrieve(check: Boolean): Option[String]=>Option[List[String]] = _.flatMap{ signed => val hash :: untilStr :: data = signed.split("=").map(URLDecoder.decode(_,"UTF-8")).toList val until = untilStr.toLong if(!check) Option(data) else if(until < System.currentTimeMillis) None else if(sign(data,until) == signed) Option(data) else None } } @c4("TaskSignerApp") final class SnapshotTaskSignerImpl(inner: SimpleSigner)( val url: String = "/need-snapshot" ) extends SnapshotTaskSigner { def sign(task: SnapshotTask, until: Long): String = inner.sign(List(url,task.name) ++ task.offsetOpt, until) def retrieve(check: Boolean): Option[String]=>Option[SnapshotTask] = signed => inner.retrieve(check)(signed) match { case Some(Seq(`url`,"next")) => Option(NextSnapshotTask(None)) case Some(Seq(`url`,"next", offset)) => Option(NextSnapshotTask(Option(offset))) case Some(Seq(`url`,"debug", offset)) => Option(DebugSnapshotTask(offset)) case _ => None } }
Example 5
Source File: HistoryNotFoundPage.scala From BigDatalog with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.master.ui import java.net.URLDecoder import javax.servlet.http.HttpServletRequest import scala.xml.Node import org.apache.spark.ui.{UIUtils, WebUIPage} private[ui] class HistoryNotFoundPage(parent: MasterWebUI) extends WebUIPage("history/not-found") { def render(request: HttpServletRequest): Seq[Node] = { val titleParam = request.getParameter("title") val msgParam = request.getParameter("msg") val exceptionParam = request.getParameter("exception") // If no parameters are specified, assume the user did not enable event logging val defaultTitle = "Event logging is not enabled" val defaultContent = <div class="row-fluid"> <div class="span12" style="font-size:14px"> No event logs were found for this application! To <a href="http://spark.apache.org/docs/latest/monitoring.html">enable event logging</a>, set <span style="font-style:italic">spark.eventLog.enabled</span> to true and <span style="font-style:italic">spark.eventLog.dir</span> to the directory to which your event logs are written. </div> </div> val title = Option(titleParam).getOrElse(defaultTitle) val content = Option(msgParam) .map { msg => URLDecoder.decode(msg, "UTF-8") } .map { msg => <div class="row-fluid"> <div class="span12" style="font-size:14px">{msg}</div> </div> ++ Option(exceptionParam) .map { e => URLDecoder.decode(e, "UTF-8") } .map { e => <pre>{e}</pre> } .getOrElse(Seq.empty) }.getOrElse(defaultContent) UIUtils.basicSparkPage(content, title) } }
Example 6
Source File: UriUtils.scala From asura with MIT License | 5 votes |
package asura.core.http import java.net.{URLDecoder, URLEncoder} import java.nio.charset.StandardCharsets import akka.http.scaladsl.model.Uri import asura.common.exceptions.InvalidStatusException import asura.common.util.StringUtils import asura.core.es.model.HttpCaseRequest import asura.core.protocols.Protocols import asura.core.runtime.RuntimeContext import asura.core.util.StringTemplate object UriUtils { val UTF8 = StandardCharsets.UTF_8.name() def toUri(cs: HttpCaseRequest, context: RuntimeContext): Uri = { Uri.from( scheme = StringUtils.notEmptyElse(cs.request.protocol, Protocols.HTTP), host = context.renderSingleMacroAsString(URLDecoder.decode(cs.request.host, UTF8)), port = if (cs.request.port < 0 || cs.request.port > 65535) 80 else cs.request.port, path = renderPath(URLDecoder.decode(cs.request.urlPath, UTF8), cs, context), queryString = buildQueryString(cs, context) ) } def mapToQueryString(map: Map[String, Any], context: RuntimeContext = null): String = { val sb = StringBuilder.newBuilder for ((k, v) <- map) { v match { case v: String => val renderedValue = if (null != context) context.renderSingleMacroAsString(v) else v sb.append(k).append("=").append(URLEncoder.encode(renderedValue, UTF8)).append("&") case v: List[_] => v.foreach(i => { val value = i.toString val renderedValue = if (null != context) context.renderSingleMacroAsString(value) else value sb.append(k).append("=").append(URLEncoder.encode(renderedValue, UTF8)).append("&") }) } } if (sb.nonEmpty) { sb.deleteCharAt(sb.length - 1) } sb.toString } @throws[InvalidStatusException]("if path template variable not in cs") def renderPath(tpl: String, cs: HttpCaseRequest, context: RuntimeContext): String = { if (null != cs.request) { val params = cs.request.path if (null != params && params.nonEmpty) { val ctx = params.map(param => param.key -> context.renderSingleMacroAsString(param.value)).toMap StringTemplate.uriPathParse(tpl, ctx) } else { tpl } } else { tpl } } def buildQueryString(cs: HttpCaseRequest, context: RuntimeContext): Option[String] = { if (null != cs.request) { val params = cs.request.query if (null != params && params.nonEmpty) { val sb = StringBuilder.newBuilder for (param <- params if param.enabled) { val key = if (StringUtils.isNotEmpty(param.key)) { URLEncoder.encode(param.key, UTF8) } else { StringUtils.EMPTY } val value = if (StringUtils.isNotEmpty(param.value)) { URLEncoder.encode(context.renderSingleMacroAsString(param.value), UTF8) } else { StringUtils.EMPTY } sb.append(key).append("=").append(value).append("&") } if (sb.nonEmpty) { sb.deleteCharAt(sb.length - 1) } Some(sb.toString) } else { None } } else { None } } }
Example 7
Source File: package.scala From squbs with Apache License 2.0 | 5 votes |
package org.squbs import java.net.{URLDecoder, URLEncoder} import java.nio.ByteBuffer import java.nio.charset.Charset import akka.actor.{Address, AddressFromURIString} import akka.util.ByteString import com.typesafe.scalalogging.Logger import org.apache.curator.framework.CuratorFramework import org.apache.zookeeper.CreateMode import org.apache.zookeeper.KeeperException.NodeExistsException import scala.language.implicitConversions import scala.util.Try import scala.util.control.NonFatal import scala.collection.JavaConverters._ package object cluster { trait SegmentationLogic { val segmentsSize:Int def segmentation(partitionKey:ByteString): String = s"segment-${Math.abs(partitionKey.hashCode()) % segmentsSize}" def partitionZkPath(partitionKey:ByteString): String = s"/segments/${segmentation(partitionKey)}/${keyToPath(partitionKey)}" def sizeOfParZkPath(partitionKey:ByteString): String = s"${partitionZkPath(partitionKey)}/$$size" def servantsOfParZkPath(partitionKey:ByteString): String = s"${partitionZkPath(partitionKey)}/servants" } case class DefaultSegmentationLogic(segmentsSize:Int) extends SegmentationLogic def guarantee(path:String, data:Option[Array[Byte]], mode:CreateMode = CreateMode.EPHEMERAL) (implicit zkClient:CuratorFramework, logger:Logger):String = { try{ data match { case None => zkClient.create.withMode(mode).forPath(path) case Some(bytes) => zkClient.create.withMode(mode).forPath(path, bytes) } } catch{ case e: NodeExistsException => if(data.nonEmpty && data.get.length > 0){ zkClient.setData().forPath(path, data.get) } path case NonFatal(e) => logger.info("leader znode creation failed due to %s\n", e) path } } def safelyDiscard(path:String, recursive: Boolean = true)(implicit zkClient: CuratorFramework): String = Try { if(recursive) zkClient.getChildren.forPath(path).asScala.foreach(child => safelyDiscard(s"$path/$child", recursive)) zkClient.delete.forPath(path) path } getOrElse path def keyToPath(name:String):String = URLEncoder.encode(name, "utf-8") def pathToKey(name:String):String = URLDecoder.decode(name, "utf-8") private[cluster] val BYTES_OF_INT = Integer.SIZE / java.lang.Byte.SIZE implicit def intToBytes(integer:Int):Array[Byte] = { val buf = ByteBuffer.allocate(BYTES_OF_INT) buf.putInt(integer) buf.rewind buf.array() } val UTF_8 = Charset.forName("utf-8") implicit class ByteConversions(val bytes: Array[Byte]) extends AnyVal { def toAddress: Option[Address] = Option(bytes) flatMap (b => if (b.length <= 0) None else Some(AddressFromURIString(new String(b, UTF_8)))) def toInt: Int = ByteBuffer.wrap(bytes).getInt def toUtf8: String = new String(bytes, UTF_8) def toByteString: ByteString = ByteString(bytes) def toAddressSet: Set[Address] = Try { new String(bytes, UTF_8).split("[,]").map(seg => AddressFromURIString(seg.trim)).toSet } getOrElse Set.empty } implicit def byteStringToUtf8(bs:ByteString):String = new String(bs.toArray, UTF_8) implicit def addressToBytes(address:Address):Array[Byte] = { address.toString.getBytes(UTF_8) } implicit def addressSetToBytes(members: Set[Address]): Array[Byte] = { members.mkString(",").getBytes(UTF_8) } }
Example 8
Source File: ConversionTests.scala From scalismo with Apache License 2.0 | 5 votes |
package scalismo.utils import java.net.URLDecoder import scalismo.ScalismoTestSuite import scalismo.geometry._2D import scalismo.io.{ImageIO, MeshIO} class ConversionTests extends ScalismoTestSuite { describe("a Mesh ") { it("can be converted to and from vtk") { val path = getClass.getResource("/facemesh.stl").getPath val origmesh = MeshIO.readMesh(new java.io.File(URLDecoder.decode(path, "UTF-8"))).get val vtkpd = MeshConversion.meshToVtkPolyData(origmesh) val restoredMesh = MeshConversion.vtkPolyDataToTriangleMesh(vtkpd).get origmesh should equal(restoredMesh) // test conversion with template val vtkpd2 = MeshConversion.meshToVtkPolyData(origmesh, Some(vtkpd)) val restoredMesh2 = MeshConversion.vtkPolyDataToTriangleMesh(vtkpd2).get origmesh should equal(restoredMesh2) } } describe("an 2D image") { it("can be converted to and from vtk") { val path = getClass.getResource("/lena.vtk").getPath val origimg = ImageIO.read2DScalarImage[Short](new java.io.File(URLDecoder.decode(path, "UTF-8"))).get val vtksp = ImageConversion.imageToVtkStructuredPoints(origimg) val restoredImg = ImageConversion.vtkStructuredPointsToScalarImage[_2D, Short](vtksp).get origimg should equal(restoredImg) } } describe("a tetrahedral mesh ") { it("can be converted to and from vtk") { val path = getClass.getResource("/tetraMesh.vtk").getPath val origmesh = MeshIO.readTetrahedralMesh(new java.io.File(URLDecoder.decode(path, "UTF-8"))).get val vtkug = TetrahedralMeshConversion.tetrahedralMeshToVTKUnstructuredGrid(origmesh) val restoredMesh = TetrahedralMeshConversion.vtkUnstructuredGridToTetrahedralMesh(vtkug).get origmesh should equal(restoredMesh) // test conversion with template val vtkug2 = TetrahedralMeshConversion.tetrahedralMeshToVTKUnstructuredGrid(origmesh, Some(vtkug)) val restoredMesh2 = TetrahedralMeshConversion.vtkUnstructuredGridToTetrahedralMesh(vtkug2).get origmesh should equal(restoredMesh2) } } }
Example 9
Source File: DataCollectionVolumeMeshTests.scala From scalismo with Apache License 2.0 | 5 votes |
package scalismo.statisticalmodel.experimental.dataset import java.io.File import java.net.URLDecoder import scalismo.ScalismoTestSuite import scalismo.geometry.{_3D, EuclideanVector, Point} import scalismo.io.MeshIO import scalismo.registration.TranslationTransform import scalismo.statisticalmodel.dataset.DataItem import scalismo.utils.Random class DataCollectionVolumeMeshTests extends ScalismoTestSuite { implicit val rng = Random(42L) describe("A datacollection Volume Mesh") { val transformations = for (i <- 0 until 10) yield TranslationTransform(EuclideanVector(i.toDouble, 0.0, 0.0)) val dataItems = for ((t, i) <- transformations.zipWithIndex) yield DataItem(s"transformation-$i", t) val meshPath = getClass.getResource("/tetraMesh.vtu").getPath val referenceMesh = MeshIO.readTetrahedralMesh(new File(URLDecoder.decode(meshPath, "UTF-8"))).get val dataCollection = DataCollectionOfVolumeMesh(referenceMesh, dataItems) it("yields the right number of cross-validation folds") { def createFolds(nFolds: Int) = { dataCollection.createCrossValidationFolds(nFolds) } createFolds(1).size should be(1) createFolds(4).size should be(4) createFolds(2).size should be(2) dataCollection.createLeaveOneOutFolds.size should be(dataItems.size) } it("considers every dataset in a leave one out test") { val folds = dataCollection.createLeaveOneOutFolds // if we accumulated all the testing datasets, we should get all dataItems back. val accumulatedTestingData = folds.foldLeft(Seq[DataItem[_3D]]())((acc, di) => acc :+ di.testingData.dataItems(0)) val sortedAccTestData = accumulatedTestingData.sortWith((a, b) => a.info > b.info) val sortedDataItems = dataCollection.dataItems.sortWith((a, b) => a.info > b.info) sortedAccTestData should equal(sortedDataItems) } it("yields the right fold sizes for a leave one out test") { for (fold <- dataCollection.createLeaveOneOutFolds) { fold.trainingData.size should be(dataCollection.size - 1) fold.testingData.size should be(1) fold.trainingData.dataItems.contains(fold.testingData) should be(false) } } it("has all distinct training datasets in a leave one out test") { val folds = dataCollection.createLeaveOneOutFolds for (fold <- folds) { fold.trainingData.dataItems.toSet.size should be(fold.trainingData.size) } } it("returns a mean surface, which is the arithmetic mean of the meshes represented by the collection") { val computedMean = dataCollection.meanSurface val meshes = dataCollection.dataItems.map(di => dataCollection.reference.transform(di.transformation)) for (pointId <- util.Random.shuffle(dataCollection.reference.pointSet.pointIds.toIndexedSeq).take(100)) { val pointsOnMeshes = meshes.map(mesh => mesh.pointSet.point(pointId)) val meanOfPoint = pointsOnMeshes.foldLeft(Point(0, 0, 0))((acc, p) => acc + p.toVector / pointsOnMeshes.size) (computedMean.pointSet.point(pointId) - meanOfPoint).norm should be < 1e-5 } } } }
Example 10
Source File: StatisticalVolumeModelTests.scala From scalismo with Apache License 2.0 | 5 votes |
package scalismo.statisticalmodel.experimental import java.io.File import java.net.URLDecoder import breeze.linalg.DenseVector import breeze.stats.distributions.Gaussian import scalismo.ScalismoTestSuite import scalismo.geometry.{_3D, Point} import scalismo.io.StatismoIO import scalismo.registration.{RigidTransformation, RigidTransformationSpace} import scalismo.utils.Random class StatisticalVolumeModelTests extends ScalismoTestSuite { implicit val random = Random(42) implicit def doubleToFloat(d: Double): Float = d.toFloat describe("A statistical Volume mesh model") { def compareModels(oldModel: StatisticalVolumeMeshModel, newModel: StatisticalVolumeMeshModel) { for (i <- 0 until 10) { val standardNormal = Gaussian(0, 1)(random.breezeRandBasis) val coeffsData = standardNormal.sample(oldModel.rank) val coeffs = DenseVector(coeffsData.toArray) val inst = oldModel.instance(coeffs) val instNew = newModel.instance(coeffs) inst.pointSet.points .zip(instNew.pointSet.points) .foreach { case (pt1, pt2) => (pt1.toVector - pt2.toVector).norm should be(0.0 +- (0.1)) } } } it("can be transformed forth and back and yield the same deformations") { val path = getClass.getResource("/TetraMeshModel2.h5").getPath val model = StatismoIO.readStatismoVolumeMeshModel(new File(URLDecoder.decode(path))).get val parameterVector = DenseVector[Double](1.5, 1.0, 3.5, Math.PI, -Math.PI / 2.0, -Math.PI) val rigidTransform = RigidTransformationSpace[_3D]().transformForParameters(parameterVector) val inverseTransform = rigidTransform.inverse.asInstanceOf[RigidTransformation[_3D]] val transformedModel = model.transform(rigidTransform) val newModel = transformedModel.transform(inverseTransform) compareModels(model, newModel) } it("can change the mean shape and still yield the same shape space") { val path = getClass.getResource("/TetraMeshModel2.h5").getPath val model = StatismoIO.readStatismoVolumeMeshModel(new File(URLDecoder.decode(path))).get val newMesh = model.sample def t(pt: Point[_3D]): Point[_3D] = { val ptId = model.referenceVolumeMesh.pointSet.findClosestPoint(pt).id newMesh.pointSet.point(ptId) } val newModel = model.changeReference(t) compareModels(model, newModel) } } }
Example 11
Source File: ActiveShapeModelTests.scala From scalismo with Apache License 2.0 | 5 votes |
package scalismo.statisticalmodel import java.io.File import java.net.URLDecoder import breeze.linalg.DenseVector import scalismo.ScalismoTestSuite import scalismo.geometry.{_3D, Point} import scalismo.io.{ImageIO, MeshIO, StatismoIO} import scalismo.mesh.{MeshMetrics, TriangleMesh} import scalismo.numerics.{Sampler, UniformMeshSampler3D} import scalismo.registration.LandmarkRegistration import scalismo.statisticalmodel.asm._ import scalismo.statisticalmodel.dataset.DataCollection import scalismo.utils.Random class ActiveShapeModelTests extends ScalismoTestSuite { describe("An active shape model") { implicit val random = Random(42) object Fixture { val imagePreprocessor = GaussianGradientImagePreprocessor(0.1f) // number of points should usually be an odd number, so that the profiles are centered on the profiled points val featureExtractor = NormalDirectionFeatureExtractor(numberOfPoints = 5, spacing = 1.0) def samplerPerMesh(mesh: TriangleMesh[_3D]): Sampler[_3D] = UniformMeshSampler3D(mesh, numberOfPoints = 1000) val searchMethod = NormalDirectionSearchPointSampler(numberOfPoints = 31, searchDistance = 6) val fittingConfig = FittingConfiguration(featureDistanceThreshold = 2.0, pointDistanceThreshold = 3.0, modelCoefficientBounds = 3.0) val path: String = URLDecoder.decode(getClass.getResource(s"/asmData/model.h5").getPath, "UTF-8") val shapeModel = StatismoIO.readStatismoMeshModel(new File(path)).get val nbFiles = 7 // use iterators so files are only loaded when required (and memory can be reclaimed after use) val meshes = (0 until nbFiles).toIterator map { i => val meshPath: String = getClass.getResource(s"/asmData/$i.stl").getPath MeshIO.readMesh(new File(URLDecoder.decode(meshPath, "UTF-8"))).get } val images = (0 until nbFiles).toIterator map { i => val imgPath: String = getClass.getResource(s"/asmData/$i.vtk").getPath ImageIO.read3DScalarImage[Float](new File(URLDecoder.decode(imgPath, "UTF-8"))).get } val targetImage = images.next() val targetMesh = meshes.next() val trainMeshes = meshes val trainImages = images val dc = DataCollection.fromMeshSequence(shapeModel.referenceMesh, trainMeshes.toIndexedSeq)._1.get val trainingData = trainImages zip dc.dataItems.toIterator.map(_.transformation) val asm = ActiveShapeModel.trainModel(shapeModel, trainingData, imagePreprocessor, featureExtractor, samplerPerMesh) // align the model val alignment = LandmarkRegistration.rigid3DLandmarkRegistration( (asm.statisticalModel.mean.pointSet.points zip targetMesh.pointSet.points).toIndexedSeq, Point(0, 0, 0) ) val alignedASM = asm.transform(alignment) } it("Can be built, transformed and correctly fitted from/to artificial data") { val fit = Fixture.alignedASM.fit(Fixture.targetImage, Fixture.searchMethod, 20, Fixture.fittingConfig).get.mesh assert(MeshMetrics.diceCoefficient(fit, Fixture.targetMesh) > 0.94) } it("Can be transformed correctly from within the fitting") { val nullInitialParameters = DenseVector.zeros[Double](Fixture.asm.statisticalModel.rank) val fit = Fixture.asm .fit(Fixture.targetImage, Fixture.searchMethod, 20, Fixture.fittingConfig, ModelTransformations(nullInitialParameters, Fixture.alignment)) .get .mesh assert(MeshMetrics.diceCoefficient(fit, Fixture.targetMesh) > 0.95) } } }
Example 12
Source File: MeshLineIntersectionTests.scala From scalismo with Apache License 2.0 | 5 votes |
package scalismo.mesh import java.io.File import java.net.URLDecoder import scalismo.ScalismoTestSuite import scalismo.geometry.EuclideanVector3D import scalismo.io.MeshIO import scalismo.utils.Random import scala.language.implicitConversions class MeshLineIntersectionTests extends ScalismoTestSuite { implicit val rng: Random = Random(1024L) describe("A intersection of a line with a tetrahedral mesh") { object Fixture { val path = getClass.getResource("/tetraMesh.vtk").getPath val testMesh = MeshIO.readTetrahedralMesh(new File(URLDecoder.decode(path, "UTF-8"))).get } it("should contain a known intersection point") { val mesh = Fixture.testMesh for (_ <- 0 until 100) { val tetId = TetrahedronId(rng.scalaRandom.nextInt(mesh.tetrahedralization.tetrahedrons.size)) // an intersection point will lie within the triangle val tet = mesh.tetrahedralization.tetrahedron(tetId) val tri = tet.triangles(rng.scalaRandom.nextInt(4)) val bc3 = BarycentricCoordinates.randomUniform val v = mesh.pointSet.point(tri.ptId1).toVector * bc3.a + mesh.pointSet.point(tri.ptId2).toVector * bc3.b + mesh.pointSet.point(tri.ptId3).toVector * bc3.c val intersectionPoint = v.toPoint // random direction val direction = EuclideanVector3D( rng.scalaRandom.nextGaussian(), rng.scalaRandom.nextGaussian(), rng.scalaRandom.nextGaussian() ).normalize // select point on the line given by direction an intersection point val anchorPoint = intersectionPoint + direction * rng.scalaRandom.nextDouble() * 100 // try to find the intersection point val intersections = mesh.operations.getIntersectionPoints(anchorPoint, direction) val distances = intersections.map(ip => (ip - intersectionPoint).norm) val closestDistanceToTrueIntersectionPoint = distances.min closestDistanceToTrueIntersectionPoint should be < 1.0e-8 } } } }
Example 13
Source File: RepairFromMessages.scala From nexus-iam with Apache License 2.0 | 5 votes |
package ch.epfl.bluebrain.nexus.iam import java.net.URLDecoder import akka.actor.ActorSystem import akka.persistence.cassandra.query.scaladsl.CassandraReadJournal import akka.persistence.query.PersistenceQuery import ch.epfl.bluebrain.nexus.iam.acls.Acls import ch.epfl.bluebrain.nexus.iam.permissions.Permissions import ch.epfl.bluebrain.nexus.iam.realms.Realms import ch.epfl.bluebrain.nexus.iam.types.Label import ch.epfl.bluebrain.nexus.rdf.Iri.Path import com.typesafe.scalalogging.Logger import monix.eval.Task import monix.execution.Scheduler import monix.execution.schedulers.CanBlock import scala.concurrent.Future object RepairFromMessages { // $COVERAGE-OFF$ private val log = Logger[RepairFromMessages.type] def repair( p: Permissions[Task], r: Realms[Task], a: Acls[Task] )(implicit as: ActorSystem, sc: Scheduler, pm: CanBlock): Unit = { val pq = PersistenceQuery(as).readJournalFor[CassandraReadJournal](CassandraReadJournal.Identifier) pq.currentPersistenceIds() .mapAsync(1) { case PermissionsId() => p.agg.currentState(p.persistenceId).runToFuture case RealmId(label) => r.agg.currentState(label.value).runToFuture case AclId(path) => a.agg.currentState(path.asString).runToFuture case other => log.warn(s"Unknown persistence id '$other'") Future.successful(()) } .runFold(0) { case (acc, _) => if (acc % 100 == 0) log.info(s"Processed '$acc' persistence ids.") acc + 1 } .runSyncDiscard() log.info("Repair from messages table completed.") } sealed abstract class PersistenceId(prefix: String) { private val len = prefix.length protected def dropPrefix(arg: String): Option[String] = if (arg.startsWith(prefix)) Some(arg.drop(len)) else None } object RealmId extends PersistenceId("realms-") { def unapply(arg: String): Option[Label] = dropPrefix(arg).map(Label.unsafe) } object AclId extends PersistenceId("acls-") { def unapply(arg: String): Option[Path] = dropPrefix(arg).flatMap(str => Path(URLDecoder.decode(str, "UTF-8")).toOption) } object PermissionsId { def unapply(arg: String): Boolean = arg == "permissions-permissions" } implicit class RichFuture[A](val future: Future[A]) extends AnyVal { def runSyncDiscard()(implicit s: Scheduler, permit: CanBlock): Unit = Task.fromFuture(future).map(_ => ()).runSyncUnsafe() } // $COVERAGE-ON$ }
Example 14
Source File: RegionQueryTest.scala From scalismo with Apache License 2.0 | 5 votes |
package scalismo.mesh import java.io.File import java.net.URLDecoder import scalismo.ScalismoTestSuite import scalismo.common.{BoxDomain, UnstructuredPointsDomain} import scalismo.geometry._ import scalismo.image.DiscreteImageDomain import scalismo.io.MeshIO class RegionQueryTest extends ScalismoTestSuite { val path = getClass.getResource("/facemesh.stl").getPath val mesh = MeshIO.readMesh(new File(URLDecoder.decode(path, "UTF-8"))).get val translationLength = 1.0 val translatedMesh = mesh.transform((pt: Point[_3D]) => pt + EuclideanVector(translationLength, 0.0, 0.0)) describe("The KD-tree region query") { it("finds points in the 2D bounding box region") { val img = DiscreteImageDomain(Point(0, 0), EuclideanVector(1, 1), IntVector(10, 10)) val bigBox = img.boundingBox val dom = UnstructuredPointsDomain[_2D](img.points.toIndexedSeq) // Smaller Region val o = bigBox.origin val e = bigBox.extent * 0.5 val nBox = BoxDomain(o, o + e) val pts = dom.findPointsInRegion(nBox).map(_.point) val groundTruth = dom.points.filter(p => nBox.isDefinedAt(p)).toSeq assert(groundTruth.forall(p => pts.contains(p))) assert(pts.forall(p => groundTruth.contains(p))) } it("finds points in the 3D bounding box region") { val bigBox = mesh.boundingBox //Smaller Region val o = bigBox.origin val e = bigBox.extent * 0.5 val nBox = BoxDomain(o, o + e) val pts = mesh.pointSet.findPointsInRegion(nBox).map(_.point) val groundTruth = mesh.pointSet.points.filter(p => nBox.isDefinedAt(p)).toSeq assert(groundTruth.forall(p => pts.contains(p))) assert(pts.forall(p => groundTruth.contains(p))) } } }
Example 15
Source File: MeshDecimationTests.scala From scalismo with Apache License 2.0 | 5 votes |
package scalismo.mesh import java.io.File import java.net.URLDecoder import scalismo.ScalismoTestSuite import scalismo.io.MeshIO class MeshDecimationTests extends ScalismoTestSuite { describe("A decimated mesh") { val path = getClass.getResource("/facemesh.stl").getPath val facemesh = MeshIO.readMesh(new File(URLDecoder.decode(path, "UTF-8"))).get it("has a reduced number of points") { val reducedMesh = facemesh.operations.decimate(facemesh.pointSet.numberOfPoints / 3) val reductionRatio = reducedMesh.pointSet.numberOfPoints / facemesh.pointSet.numberOfPoints.toDouble reductionRatio should be(0.3 +- 0.1) } it("has approximately preserves the surface") { val reducedMesh = facemesh.operations.decimate(facemesh.pointSet.numberOfPoints / 2) MeshMetrics.hausdorffDistance(reducedMesh, facemesh) < 1.0 } } }
Example 16
Source File: ResampleTests.scala From scalismo with Apache License 2.0 | 5 votes |
package scalismo.image import java.io.File import java.net.URLDecoder import scalismo.ScalismoTestSuite import scalismo.common.PointId import scalismo.common.interpolation.{BSplineImageInterpolator2D, BSplineImageInterpolator3D} import scalismo.io.ImageIO class ResampleTests extends ScalismoTestSuite { describe("Resampling a 2D image") { val testImgUrl = getClass.getResource("/lena.vtk").getPath val discreteImage = ImageIO.read2DScalarImage[Short](new File(URLDecoder.decode(testImgUrl, "UTF-8"))).get // here we do 1st order interpolation. 3rd order would not work, as it does not necessarily preserve the // pixel values at the strong edges - and we thus could not formulate a reasonable test val continuousImage = discreteImage.interpolate(BSplineImageInterpolator2D[Short](1)) it("yields the original discrete image") { val resampledImage = continuousImage.sample(discreteImage.domain, 0) discreteImage.values.size should equal(resampledImage.values.size) for (i <- 0 until discreteImage.values.size) { discreteImage(PointId(i)) should be(resampledImage(PointId(i))) } } } describe("Resampling a 3D image") { val path = getClass.getResource("/3dimage.nii").getPath val discreteImage = ImageIO.read3DScalarImage[Short](new File(URLDecoder.decode(path, "UTF-8"))).get val continuousImage = discreteImage.interpolate(BSplineImageInterpolator3D[Short](0)) it("yields the original discrete image") { val resampledImage = continuousImage.sample(discreteImage.domain, 0) for (i <- 0 until discreteImage.values.size by 100) { discreteImage(PointId(i)) should be(resampledImage(PointId(i))) } } } }
Example 17
Source File: StatisticalVolumeMeshModelIOTests.scala From scalismo with Apache License 2.0 | 5 votes |
package scalismo.io.experimental import java.io.File import java.net.URLDecoder import scalismo.ScalismoTestSuite import scalismo.io.experimental import scalismo.statisticalmodel.experimental.StatisticalVolumeMeshModel class StatisticalVolumeMeshModelIOTest extends ScalismoTestSuite { describe("a Statismo Mesh volume Model") { def assertModelAlmostEqual(model1: StatisticalVolumeMeshModel, model2: StatisticalVolumeMeshModel): Unit = { assert(model1.mean == model2.mean) assert(breeze.linalg.norm(model1.gp.variance - model2.gp.variance) < 1e-5) assert(breeze.linalg.sum(model1.gp.basisMatrix - model2.gp.basisMatrix) < 1e-5) } it("can be written and read again") { val statismoFile = new File(URLDecoder.decode(getClass.getResource("/TetraMeshModel2.h5").getPath, "UTF-8")) val dummyFile = File.createTempFile("dummy", "h5") dummyFile.deleteOnExit() val t = for { model <- experimental.StatismoIO.readStatismoVolumeMeshModel(statismoFile) _ <- experimental.StatismoIO.writeStatismoVolumeMeshModel(model, dummyFile) readModel <- experimental.StatismoIO.readStatismoVolumeMeshModel(dummyFile) } yield { assertModelAlmostEqual(model, readModel) } t.get } it("can be written and read again in non-standard location") { val statismoFile = new File(URLDecoder.decode(getClass.getResource("/TetraMeshModel2.h5").getPath, "UTF-8")) val dummyFile = File.createTempFile("dummy", "h5") dummyFile.deleteOnExit() val t = for { model <- experimental.StatismoIO.readStatismoVolumeMeshModel(statismoFile) _ <- experimental.StatismoIO.writeStatismoVolumeMeshModel(model, dummyFile, "/someLocation") readModel <- experimental.StatismoIO.readStatismoVolumeMeshModel(dummyFile, "/someLocation") } yield { assertModelAlmostEqual(model, readModel) } t.get } it("can be written in version 0.81 and read again") { import scalismo.io.experimental.StatismoIO.StatismoVersion.v081 val statismoFile = new File(URLDecoder.decode(getClass.getResource("/TetraMeshModel2.h5").getPath, "UTF-8")) val dummyFile = File.createTempFile("dummy", "h5") dummyFile.deleteOnExit() val t = for { model <- experimental.StatismoIO.readStatismoVolumeMeshModel(statismoFile) _ <- experimental.StatismoIO.writeStatismoVolumeMeshModel(model, dummyFile, statismoVersion = v081) readModel <- experimental.StatismoIO.readStatismoVolumeMeshModel(dummyFile) } yield { assertModelAlmostEqual(model, readModel) } t.get } } }
Example 18
Source File: LandmarkIOTests.scala From scalismo with Apache License 2.0 | 5 votes |
package scalismo.io import java.io.{ByteArrayOutputStream, File, InputStream} import java.net.URLDecoder import breeze.linalg.DenseVector import scalismo.ScalismoTestSuite import scalismo.geometry._ import scalismo.statisticalmodel.MultivariateNormalDistribution import scala.io.Source import scala.language.implicitConversions import scala.collection.immutable.Seq class LandmarkIOTests extends ScalismoTestSuite { implicit def doubleToFloat(d: Double): Float = d.toFloat implicit def inputStreamToSource(s: InputStream): Source = Source.fromInputStream(s) describe("Spray LandmarkIO") { val csvName = "/landmarks.csv" def csvStream() = getClass.getResourceAsStream(csvName) val jsonName = "/landmarks.json" def jsonStream() = getClass.getResourceAsStream(jsonName) def distWithDefaultVectors(d1: Double, d2: Double, d3: Double): MultivariateNormalDistribution = { val axes = List(DenseVector[Double](1, 0, 0), DenseVector[Double](0, 1, 0), DenseVector[Double](0, 0, 1)) val devs = List(d1, d2, d3) val data = axes zip devs MultivariateNormalDistribution(DenseVector[Double](0, 0, 0), data) } val jsonLm1 = Landmark("one", Point(1, 2, 3)) val jsonLm2 = Landmark("two", Point(2, 3, 4), Some("Landmark two"), Some(distWithDefaultVectors(1, 4, 9))) val jsonLms = List(jsonLm1, jsonLm2) it("can serialize and deserialize simple landmarks using JSON") { val out = new ByteArrayOutputStream() LandmarkIO.writeLandmarksJsonToStream(jsonLms, out) val written = new String(out.toByteArray) val read = LandmarkIO.readLandmarksJsonFromSource[_3D](Source.fromString(written)).get read should equal(jsonLms) } it("can read simple landmarks from a JSON Stream") { val read = LandmarkIO.readLandmarksJsonFromSource[_3D](jsonStream()).get read should equal(jsonLms) } } }
Example 19
Source File: ActiveShapeModelIOTests.scala From scalismo with Apache License 2.0 | 5 votes |
package scalismo.io import java.io.File import java.net.URLDecoder import breeze.linalg.{DenseMatrix, DenseVector} import scalismo.ScalismoTestSuite import scalismo.numerics.FixedPointsUniformMeshSampler3D import scalismo.statisticalmodel.MultivariateNormalDistribution import scalismo.statisticalmodel.asm._ import scalismo.utils.Random import scala.collection.immutable class ActiveShapeModelIOTests extends ScalismoTestSuite { implicit val rng = Random(42L) private def createTmpH5File(): File = { val f = File.createTempFile("hdf5file", ".h5") f.deleteOnExit() f } private def createAsm(): ActiveShapeModel = { val statismoFile = new File(URLDecoder.decode(getClass.getResource("/facemodel.h5").getPath, "UTF-8")) val shapeModel = StatismoIO.readStatismoMeshModel(statismoFile).get val (sprofilePoints, _) = new FixedPointsUniformMeshSampler3D(shapeModel.referenceMesh, 100).sample.unzip val pointIds = sprofilePoints.map { point => shapeModel.referenceMesh.pointSet.findClosestPoint(point).id } val dists = for (i <- pointIds.indices) yield new MultivariateNormalDistribution(DenseVector.ones[Double](3) * i.toDouble, DenseMatrix.eye[Double](3) * i.toDouble) val profiles = new Profiles(pointIds.to[immutable.IndexedSeq].zip(dists).map { case (i, d) => Profile(i, d) }) new ActiveShapeModel(shapeModel, profiles, GaussianGradientImagePreprocessor(1), NormalDirectionFeatureExtractor(1, 1)) } describe("An active shape model") { it("can be written to disk and read again") { val originalAsm = createAsm() val h5file = createTmpH5File() ActiveShapeModelIO.writeActiveShapeModel(originalAsm, h5file).get val newAsm = ActiveShapeModelIO.readActiveShapeModel(h5file).get newAsm should equal(originalAsm) h5file.delete() } } }
Example 20
Source File: Launcher.scala From slab with Apache License 2.0 | 5 votes |
// Example: A Slab server // // Guide for creating a Slab server package com.criteo.slab.example import java.net.URLDecoder import cats.effect.IO import com.criteo.slab.app.StateService.NotFoundError import com.criteo.slab.app.WebServer import com.criteo.slab.lib.InMemoryStore import lol.http._ import org.slf4j.LoggerFactory object Launcher { import SimpleBoard._ import scala.concurrent.ExecutionContext.Implicits.global private val logger = LoggerFactory.getLogger(this.getClass) def main(args: Array[String]): Unit = { require(args.length == 1, "you must supply a port!") val port = args(0).toInt // You should provide codec for checked value types for values to be persistent in a store import InMemoryStore.codec // Define a value store for uploading and restoring history implicit val store = new InMemoryStore // Create a web server WebServer() // You can define custom routes, Slab web server is built with [lolhttp](https://github.com/criteo/lolhttp) .withRoutes(stateService => { case GET at "/api/heartbeat" => Ok("ok") case GET at url"/api/boards/$board/status" => IO.fromFuture(IO( stateService .current(URLDecoder.decode(board, "UTF-8")).map(view => Ok(view.status.name)) .recover { case NotFoundError(message) => NotFound(message) case e => logger.error(e.getMessage, e) InternalServerError } )) }) // Attach a board to the server .attach(board) // Launch the server at port .apply(port) } }
Example 21
Source File: RepairFromMessages.scala From nexus-kg with Apache License 2.0 | 5 votes |
package ch.epfl.bluebrain.nexus.kg import java.net.URLDecoder import java.util.UUID import akka.actor.ActorSystem import akka.persistence.cassandra.query.scaladsl.CassandraReadJournal import akka.persistence.query.PersistenceQuery import ch.epfl.bluebrain.nexus.kg.resources.{Id, Repo, ResId} import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef import ch.epfl.bluebrain.nexus.rdf.Iri import com.typesafe.scalalogging.Logger import monix.eval.Task import monix.execution.Scheduler import monix.execution.schedulers.CanBlock import scala.concurrent.Future import scala.util.Try object RepairFromMessages { // $COVERAGE-OFF$ private val log = Logger[RepairFromMessages.type] def repair(repo: Repo[Task])(implicit as: ActorSystem, sc: Scheduler, pm: CanBlock): Unit = { log.info("Repairing dependent tables from messages.") val pq = PersistenceQuery(as).readJournalFor[CassandraReadJournal](CassandraReadJournal.Identifier) Task .fromFuture { pq.currentPersistenceIds() .mapAsync(1) { case ResourceId(id) => (repo.get(id, None).value >> Task.unit).runToFuture case other => log.warn(s"Unknown persistence id '$other'") Future.successful(()) } .runFold(0) { case (acc, _) => if (acc % 1000 == 0) log.info(s"Processed '$acc' persistence ids.") acc + 1 } .map(_ => ()) } .runSyncUnsafe() log.info("Finished repairing dependent tables from messages.") } object ResourceId { private val regex = "^resources\\-([0-9a-fA-F]{8}\\-[0-9a-fA-F]{4}\\-[0-9a-fA-F]{4}\\-[0-9a-fA-F]{4}\\-[0-9a-fA-F]{12})\\-(.+)$".r def unapply(arg: String): Option[ResId] = arg match { case regex(stringUuid, stringId) => for { uuid <- Try(UUID.fromString(stringUuid)).toOption iri <- Iri.absolute(URLDecoder.decode(stringId, "UTF-8")).toOption } yield Id(ProjectRef(uuid), iri) case _ => None } } // $COVERAGE-ON$ }
Example 22
Source File: WebSocketServerProcessor.scala From ez-framework with Apache License 2.0 | 5 votes |
package com.ecfront.ez.framework.service.gateway import java.net.URLDecoder import com.ecfront.common.JsonHelper import com.ecfront.ez.framework.core.rpc.Method import com.ecfront.ez.framework.service.gateway.interceptor.EZAPIContext import io.vertx.core.Handler import io.vertx.core.http._ class WebSocketServerProcessor extends Handler[ServerWebSocket] with GatewayProcessor { override def handle(request: ServerWebSocket): Unit = { val ip = if (request.headers().contains(FLAG_PROXY) && request.headers.get(FLAG_PROXY).nonEmpty) { request.headers.get(FLAG_PROXY) } else { request.remoteAddress().host() } logger.trace(s"Receive a request [${request.uri()}] , from $ip ") try { router(request, ip) } catch { case ex: Throwable => logger.error("WS process error.", ex) request.writeFinalTextFrame("Request process error:${ex.getMessage}") } } private def router(request: ServerWebSocket, ip: String): Unit = { val parameters = if (request.query() != null && request.query().nonEmpty) { URLDecoder.decode(request.query(), "UTF-8").split("&").map { item => val entry = item.split("=") entry(0) -> entry(1) }.toMap } else { Map[String, String]() } val result = LocalCacheContainer.getRouter(Method.WS.toString, request.path(), parameters, ip) WebSocketMessagePushManager.createWS(result._3, request) if (result._1) { val context = new EZAPIContext() context.remoteIP = ip context.method = Method.WS.toString context.templateUri = result._3 context.realUri = request.uri() context.parameters = result._2 context.accept = "" context.contentType = "" request.frameHandler(new Handler[WebSocketFrame] { override def handle(event: WebSocketFrame): Unit = { execute(request, event.textData(), context) } }) } else { request.writeFinalTextFrame(JsonHelper.toJsonString(result._1)) } } private def execute(request: ServerWebSocket, body: String, context: EZAPIContext): Unit = { execute(body, context, { resp => WebSocketMessagePushManager.ws(context.templateUri, resp.body._1.executeResult) }) } }
Example 23
Source File: ExecutorService.scala From sparkplug with MIT License | 5 votes |
package springnz.sparkplug.executor import java.net.{ URLDecoder, URLEncoder } import java.time.LocalDate import akka.actor._ import com.typesafe.config.ConfigFactory import springnz.sparkplug.core._ import springnz.sparkplug.util.Logging import scala.util.{ Properties, Try } object Constants { val defaultAkkaRemoteConfigSection = "akkaRemote" val actorSystemName = "sparkplugExecutorSystem" val brokerActorName = "sparkplugRequestBroker" } object ExecutorService extends Logging { import Constants._ lazy val defaultRemoteAkkaConfig = ConfigFactory.load.getConfig(s"sparkplug.$defaultAkkaRemoteConfigSection") // TODO: proper command line parsing to allow richer config options def main(args: Array[String]): Unit = { if (args.length < 4) throw new IllegalArgumentException(s"Expected at least 4 arguments to ExecutorService. Args = : ${args.toList}") val appName = args(1) val sparkClientPath = args(3) log.info(s"Starting Sparkplug ExecutorService: SparkClient = $sparkClientPath: ${LocalDate.now()}") val remoteConfig = if (args.length == 6) { val urlEncodedConfig = args(5) val configString = URLDecoder.decode(urlEncodedConfig, "UTF-8") val config = ConfigFactory.parseString(configString) log.info(s"Using akka remote config:\n$configString") config } else { log.info(s"Using default akka remote config from config section 'sparkplug.$defaultAkkaRemoteConfigSection'") defaultRemoteAkkaConfig } import scala.collection.JavaConversions._ def env = System.getenv().toMap log.debug(s"Environment:\n $env") val system = ActorSystem(actorSystemName, remoteConfig) val executorService = new ExecutorService(appName) executorService.start(system, sparkClientPath) log.info("Terminating the remote application.") } } class ExecutorService(appName: String, brokerName: String = Constants.brokerActorName) extends LongLivedExecutor with Logging { // Note that the SparkConf inherits all its settings from spark-submit override val configurer: Configurer = new LocalConfigurer(appName, Properties.envOrNone("SPARK_MASTER"), None) def start(system: ActorSystem, sparkClientPath: String): Try[Unit] = { val actorOperation = SparkOperation[Unit] { implicit sparkContext ⇒ def postStopAction() = { log.info("Cancelling any jobs (if any are running).") sparkContext.cancelAllJobs() log.info("Stopping Spark context.") sparkContext.stop() } log.info("Creating requestBroker for ExecutorService.") system.actorOf(Props(new RequestBroker(sparkClientPath, postStopAction)), name = brokerName) } log.info("Executing container operation (everything happens inside this method).") val result = execute(actorOperation) log.info("Finished executing container operation (everything happens inside this method).") result } }
Example 24
Source File: UrlencodedData.scala From tapir with Apache License 2.0 | 5 votes |
package sttp.tapir.internal import java.net.{URLDecoder, URLEncoder} import java.nio.charset.Charset private[tapir] object UrlencodedData { def decode(s: String, charset: Charset): Seq[(String, String)] = { s.split("&") .toList .flatMap(kv => kv.split("=", 2) match { case Array(k, v) => Some((URLDecoder.decode(k, charset.toString), URLDecoder.decode(v, charset.toString))) case _ => None } ) } def encode(s: Seq[(String, String)], charset: Charset): String = { s.map { case (k, v) => s"${URLEncoder.encode(k, charset.toString)}=${URLEncoder.encode(v, charset.toString)}" } .mkString("&") } }
Example 25
Source File: DependencyNode.scala From cuesheet with Apache License 2.0 | 5 votes |
package com.kakao.cuesheet.deps import java.io.{BufferedOutputStream, File, FileOutputStream, IOException} import java.net.{URL, URLDecoder} import java.nio.file.{Files, Paths} import java.util.zip.{ZipEntry, ZipOutputStream} import com.kakao.mango.io.FileSystems import com.kakao.mango.logging.Logging import com.kakao.shaded.guava.io.Files.createTempDir sealed trait DependencyNode { def path: String } case class ManagedDependency(group: String, artifact: String, classifier: String = "jar") case class ManagedDependencyNode( path: String, group: String, artifact: String, classifier: String, version: String, children: Seq[ManagedDependency] ) extends DependencyNode { def key = ManagedDependency(group, artifact, classifier) } case class DirectoryDependencyNode(path: String) extends DependencyNode with Logging { lazy val compressed: UnmanagedDependencyNode = { val tmpdir = createTempDir() val jar = new File(s"${tmpdir.getAbsolutePath}/local-${tmpdir.getName}.jar") val root = Paths.get(path) val output = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(jar))) var count = 0 FileSystems.entries(root).foreach { path => if (resourceExtensions.exists(path.toString.endsWith)) { val entry = new ZipEntry(root.relativize(path).toString) output.putNextEntry(entry) try { Files.copy(path, output) count += 1 } catch { case e: IOException => logger.warn(s"skipping $path due to an IOException: ${e.getMessage}") } output.closeEntry() } } output.close() logger.debug(s"Successfully zipped $count files in $path into $jar") UnmanagedDependencyNode(jar.getAbsolutePath) } } case class JavaRuntimeDependencyNode(path: String) extends DependencyNode case class UnmanagedDependencyNode(path: String) extends DependencyNode object DependencyNode { val resolver = new ChainedArtifactResolver( new IvyPathArtifactResolver, new IvyOriginalPathArtifactResolver, new MavenPathArtifactResolver, new GradlePathArtifactResolver, new JavaRuntimeResolver, new MavenMetadataArtifactResolver, new UnmanagedJarResolver ) def resolve(url: URL): DependencyNode = { if (url.getProtocol != "file") { throw new IllegalArgumentException("non-file dependency is not supported") } val path = URLDecoder.decode(url.getFile, "UTF-8") val file = new File(path) if (file.isDirectory) { return DirectoryDependencyNode(file.getAbsolutePath) } if (!file.isFile || !file.canRead) { throw new IllegalArgumentException(s"$path is not a file or readable") } DependencyNode.resolver.resolve(file.getAbsolutePath) match { case Some(node) => node case None => throw new IllegalArgumentException(s"Could not determine the dependency of $path") } } }
Example 26
Source File: UrlUtils.scala From incubator-s2graph with Apache License 2.0 | 5 votes |
package org.apache.s2graph.s2jobs.wal.utils import java.net.{URI, URLDecoder} import scala.util.matching.Regex object UrlUtils { val pattern = new Regex("""(\\x[0-9A-Fa-f]{2}){3}""") val koreanPattern = new scala.util.matching.Regex("([가-힣]+[\\-_a-zA-Z 0-9]*)+|([\\-_a-zA-Z 0-9]+[가-힣]+)") // url extraction functions def urlDecode(url: String): (Boolean, String) = { try { val decoded = URLDecoder.decode(url, "UTF-8") (url != decoded, decoded) } catch { case e: Exception => (false, url) } } def hex2String(url: String): String = { pattern replaceAllIn(url, m => { new String(m.toString.replaceAll("[^0-9A-Fa-f]", "").sliding(2, 2).toArray.map(Integer.parseInt(_, 16).toByte), "utf-8") }) } def toDomains(url: String, maxDepth: Int = 3): Seq[String] = { val uri = new URI(url) val domain = uri.getHost if (domain == null) Nil else { val paths = uri.getPath.split("/") if (paths.isEmpty) Seq(domain) else { val depth = Math.min(maxDepth, paths.size) (1 to depth).map { ith => domain + paths.take(ith).mkString("/") } } } } def extract(_url: String): (String, Seq[String], Option[String]) = { try { val url = hex2String(_url) val (encoded, decodedUrl) = urlDecode(url) val kwdOpt = koreanPattern.findAllMatchIn(decodedUrl).toList.map(_.group(0)).headOption.map(_.replaceAll("\\s", "")) val domains = toDomains(url.replaceAll(" ", "")) (decodedUrl, domains, kwdOpt) } catch { case e: Exception => (_url, Nil, None) } } }
Example 27
Source File: WikipediaToDBpediaClosure.scala From dbpedia-spotlight-model with Apache License 2.0 | 5 votes |
package org.dbpedia.spotlight.db import org.semanticweb.yars.nx.parser.NxParser import java.io.InputStream import org.dbpedia.spotlight.log.SpotlightLog import collection.immutable.ListSet import scala.Predef._ import org.dbpedia.spotlight.exceptions.NotADBpediaResourceException import java.net.URLDecoder import org.dbpedia.spotlight.model.SpotlightConfiguration import org.dbpedia.extraction.util.WikiUtil import scala.collection.mutable.ListBuffer def wikipediaToDBpediaURI(url: String): String = { val uri = if(url.startsWith("http:")) { getEndOfChainURI(decodedNameFromURL(url)) } else { getEndOfChainURI(decodeURL(url)) } if (disambiguationsSet.contains(uri) || uri == null) throw new NotADBpediaResourceException("Resource is a disambiguation page.") else uri } def getEndOfChainURI(uri: String): String = getEndOfChainURI(uri, Set(uri)) private def getEndOfChainURI(uri: String, alreadyTraversed:Set[String]): String = linkMap.get(uri) match { case Some(s: String) => if (alreadyTraversed.contains(s)) uri else getEndOfChainURI(s, alreadyTraversed + s) case None => uri } }
Example 28
Source File: FilterRecursiveListBucketHandler.scala From rokku with Apache License 2.0 | 5 votes |
package com.ing.wbaa.rokku.proxy.handler import java.net.URLDecoder import akka.NotUsed import akka.http.scaladsl.model.{ HttpRequest, HttpResponse } import akka.stream.alpakka.xml.scaladsl.{ XmlParsing, XmlWriting } import akka.stream.alpakka.xml.{ EndElement, ParseEvent, StartElement, TextEvent } import akka.stream.scaladsl.Flow import akka.util.ByteString import com.ing.wbaa.rokku.proxy.data.{ Read, RequestId, S3Request, User } import scala.collection.immutable import scala.collection.mutable.ListBuffer protected[this] def filterRecursiveListObjects(user: User, requestS3: S3Request)(implicit id: RequestId): Flow[ByteString, ByteString, NotUsed] = { def elementResult(allContentsElements: ListBuffer[ParseEvent], isContentsTag: Boolean, element: ParseEvent): immutable.Seq[ParseEvent] = { if (isContentsTag) { allContentsElements += element immutable.Seq.empty } else { immutable.Seq(element) } } def isPathOkInRangerPolicy(path: String)(implicit id: RequestId): Boolean = { val pathToCheck = normalizePath(path) val isUserAuthorized = isUserAuthorizedForRequest(requestS3.copy(s3BucketPath = Some(pathToCheck)), user) isUserAuthorized } def normalizePath(path: String): String = { val delimiter = "/" val decodedPath = URLDecoder.decode(path, "UTF-8") val delimiterIndex = decodedPath.lastIndexOf(delimiter) val pathToCheckWithoutLastSlash = if (delimiterIndex > 0) delimiter + decodedPath.substring(0, delimiterIndex) else "" val s3BucketName = requestS3.s3BucketPath.getOrElse(delimiter) val s3pathWithoutLastDelimiter = if (s3BucketName.length > 1 && s3BucketName.endsWith(delimiter)) s3BucketName.substring(0, s3BucketName.length - 1) else s3BucketName s3pathWithoutLastDelimiter + pathToCheckWithoutLastSlash } Flow[ByteString].via(XmlParsing.parser) .statefulMapConcat(() => { // state val keyTagValue = StringBuilder.newBuilder val allContentsElements = new ListBuffer[ParseEvent] var isContentsTag = false var isKeyTag = false // aggregation function parseEvent => parseEvent match { //catch <Contents> to start collecting elements case element: StartElement if element.localName == "Contents" => isContentsTag = true allContentsElements.clear() allContentsElements += element immutable.Seq.empty //catch end </Contents> to validate the path in ranger case element: EndElement if element.localName == "Contents" => isContentsTag = false allContentsElements += element if (isPathOkInRangerPolicy(keyTagValue.stripMargin)) { allContentsElements.toList } else { immutable.Seq.empty } // catch <Key> where is the patch name to match in ranger case element: StartElement if element.localName == "Key" => keyTagValue.clear() isKeyTag = true elementResult(allContentsElements, isContentsTag, element) //catch end </Key> case element: EndElement if element.localName == "Key" => isKeyTag = false elementResult(allContentsElements, isContentsTag, element) //catch all element text <..>text<\..> but only set the text from <Key> case element: TextEvent => if (isKeyTag) keyTagValue.append(element.text) elementResult(allContentsElements, isContentsTag, element) //just past through the rest of elements case element => elementResult(allContentsElements, isContentsTag, element) } }) .via(XmlWriting.writer) } }
Example 29
Source File: AccessTokenFetcher.scala From tsec with MIT License | 5 votes |
package tsec.oauth2.provider import cats.implicits._ import java.net.URLDecoder import cats.data.NonEmptyList final case class FetchResult(token: String, params: Map[String, String]) sealed trait AccessTokenFetcher { def matches(request: ProtectedResourceRequest): Boolean def fetch(request: ProtectedResourceRequest): Either[InvalidRequest, FetchResult] } object AccessTokenFetcher { object RequestParameter extends AccessTokenFetcher { override def matches(request: ProtectedResourceRequest): Boolean = request.oauthToken.isDefined || request.accessToken.isDefined override def fetch(request: ProtectedResourceRequest): Either[InvalidRequest, FetchResult] = { val t = request.oauthToken orElse (request.accessToken) val params = request.params.filter { case (_, v) => !v.isEmpty } map { case (k, v) => (k, v.head) } t.map(s => FetchResult(s, params - ("oauth_token", "access_token"))) .toRight(InvalidRequest("missing access token")) } } object AuthHeader extends AccessTokenFetcher { val RegexpAuthorization = """^\s*(OAuth|Bearer)\s+([^\s\,]*)""".r val RegexpTrim = """^\s*,\s*""".r val RegexpDivComma = """,\s*""".r override def matches(request: ProtectedResourceRequest): Boolean = request.header("Authorization").exists { header => RegexpAuthorization.findFirstMatchIn(header).isDefined } override def fetch(request: ProtectedResourceRequest): Either[InvalidRequest, FetchResult] = for { header <- request.header("authorization").toRight(InvalidRequest("Missing authorization header")) matcher <- RegexpAuthorization.findFirstMatchIn(header).toRight(InvalidRequest("invalid Authorization header")) token = matcher.group(2) end = matcher.end params <- if (header.length != end) { val trimmedHeader = RegexpTrim.replaceFirstIn(header.substring(end), "") val pairs = RegexpDivComma.split(trimmedHeader).map { exp => val (key, value) = exp.split("=", 2) match { case Array(k, v) => (k, v.replaceFirst("^\"", "")) case Array(k) => (k, "") } val v = Either.catchNonFatal(URLDecoder.decode(value.replaceFirst("\"$", ""), "UTF-8")) v.map(vv => (key, vv)).leftMap(t => NonEmptyList.one(t.getMessage)) } pairs.toList.parSequence.map(x => Map(x: _*)).leftMap(x => InvalidRequest(x.toList.mkString(","))) } else { Right(Map.empty[String, String]) } } yield FetchResult(token, params) } }
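The AuthHeader fetcher ends by URL-decoding each key="value" pair pulled out of the Authorization header. A rough sketch of that single step, assuming the pair is already isolated and contains an "=":

import java.net.URLDecoder

object AuthParamSketch {
  // Strip the surrounding quotes from the value, then percent-decode it.
  def decodePair(pair: String): (String, String) = {
    val parts = pair.split("=", 2)
    parts(0) -> URLDecoder.decode(parts(1).stripPrefix("\"").stripSuffix("\""), "UTF-8")
  }

  def main(args: Array[String]): Unit = {
    // prints (realm,photos http://example.com)
    println(decodePair("realm=\"photos%20http%3A%2F%2Fexample.com\""))
  }
}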
Example 30
Source File: ActivateAccountController.scala From play-silhouette-reactivemongo-seed with Apache License 2.0 | 5 votes |
package controllers import java.net.URLDecoder import java.util.UUID import javax.inject.Inject import com.mohiva.play.silhouette.api._ import com.mohiva.play.silhouette.impl.providers.CredentialsProvider import models.services.{ AuthTokenService, UserService } import play.api.i18n.{ I18nSupport, Messages, MessagesApi } import play.api.libs.concurrent.Execution.Implicits._ import play.api.libs.mailer.{ Email, MailerClient } import play.api.mvc.Controller import utils.auth.DefaultEnv import scala.concurrent.Future import scala.language.postfixOps def activate(token: UUID) = silhouette.UnsecuredAction.async { implicit request => authTokenService.validate(token).flatMap { case Some(authToken) => userService.retrieve(authToken.userID).flatMap { case Some(user) if user.loginInfo.providerID == CredentialsProvider.ID => userService.save(user.copy(activated = true)).map { _ => Redirect(routes.SignInController.view()).flashing("success" -> Messages("account.activated")) } case _ => Future.successful(Redirect(routes.SignInController.view()).flashing("error" -> Messages("invalid.activation.link"))) } case None => Future.successful(Redirect(routes.SignInController.view()).flashing("error" -> Messages("invalid.activation.link"))) } } }
Example 31
Source File: SessionSerializer.scala From akka-http-session with Apache License 2.0 | 5 votes |
package com.softwaremill.session import java.net.{URLDecoder, URLEncoder} import scala.util.Try trait SessionSerializer[T, R] { def serialize(t: T): R def deserialize(r: R): Try[T] def deserializeV0_5_2(r: R): Try[T] = deserialize(r) } class SingleValueSessionSerializer[T, V](toValue: T => V, fromValue: V => Try[T])( implicit valueSerializer: SessionSerializer[V, String]) extends SessionSerializer[T, String] { override def serialize(t: T) = valueSerializer.serialize(toValue(t)) override def deserialize(r: String) = valueSerializer.deserialize(r).flatMap(fromValue) } class MultiValueSessionSerializer[T](toMap: T => Map[String, String], fromMap: Map[String, String] => Try[T]) extends SessionSerializer[T, String] { import SessionSerializer._ override def serialize(t: T) = toMap(t) .map { case (k, v) => urlEncode(k) + "~" + urlEncode(v) } .mkString("&") override def deserialize(s: String) = { Try { if (s == "") Map.empty[String, String] else { s.split("&") .map(_.split("~", 2)) .map(p => urlDecode(p(0)) -> urlDecode(p(1))) .toMap } }.flatMap(fromMap) } override def deserializeV0_5_2(s: String) = { Try { if (s == "") Map.empty[String, String] else { s.split("&") .map(_.split("=", 2)) .map(p => urlDecode(p(0)) -> urlDecode(p(1))) .toMap } }.flatMap(fromMap) } } object SessionSerializer { implicit def stringToStringSessionSerializer: SessionSerializer[String, String] = new SessionSerializer[String, String] { override def serialize(t: String) = urlEncode(t) override def deserialize(s: String) = Try(urlDecode(s)) } implicit def intToStringSessionSerializer: SessionSerializer[Int, String] = new SessionSerializer[Int, String] { override def serialize(t: Int) = urlEncode(t.toString) override def deserialize(s: String) = Try(urlDecode(s).toInt) } implicit def longToStringSessionSerializer: SessionSerializer[Long, String] = new SessionSerializer[Long, String] { override def serialize(t: Long) = urlEncode(t.toString) override def deserialize(s: String) = Try(urlDecode(s).toLong) } implicit def floatToStringSessionSerializer: SessionSerializer[Float, String] = new SessionSerializer[Float, String] { override def serialize(t: Float) = urlEncode(t.toString) override def deserialize(s: String) = Try(urlDecode(s).toFloat) } implicit def doubleToStringSessionSerializer: SessionSerializer[Double, String] = new SessionSerializer[Double, String] { override def serialize(t: Double) = urlEncode(t.toString) override def deserialize(s: String) = Try(urlDecode(s).toDouble) } implicit def mapToStringSessionSerializer: SessionSerializer[Map[String, String], String] = new MultiValueSessionSerializer[Map[String, String]](identity, Try(_)) private[session] def urlEncode(s: String): String = URLEncoder.encode(s, "UTF-8") private[session] def urlDecode(s: String): String = URLDecoder.decode(s, "UTF-8") }
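The serializer relies on URLEncoder/URLDecoder to keep the "~" and "&" separators unambiguous. A small round-trip check of that encode/decode pair, with made-up session values:

import java.net.{URLDecoder, URLEncoder}

object SessionRoundTripSketch {
  private def enc(s: String) = URLEncoder.encode(s, "UTF-8")
  private def dec(s: String) = URLDecoder.decode(s, "UTF-8")

  // Each key and value is URL-encoded, so "~" and "&" inside the data
  // cannot collide with the separators used between entries.
  def serialize(m: Map[String, String]): String =
    m.map { case (k, v) => enc(k) + "~" + enc(v) }.mkString("&")

  def deserialize(s: String): Map[String, String] =
    if (s.isEmpty) Map.empty
    else s.split("&").map(_.split("~", 2)).map(p => dec(p(0)) -> dec(p(1))).toMap

  def main(args: Array[String]): Unit = {
    val m = Map("user" -> "jan & eva", "lang" -> "pl~PL")
    assert(deserialize(serialize(m)) == m) // round-trips cleanly
  }
}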
Example 32
Source File: GameManager.scala From telegram with Apache License 2.0 | 5 votes |
package com.bot4s.telegram.api import java.net.URLDecoder import java.nio.charset.StandardCharsets import java.util.Base64 import akka.http.scaladsl.model.StatusCodes import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.{Directive1, Route} import com.bot4s.telegram.marshalling import com.bot4s.telegram.methods.{GetGameHighScores, SetGameScore} import com.bot4s.telegram.models.{CallbackQuery, ChatId, User} import com.bot4s.telegram.future.BotExecutionContext import io.circe.generic.extras.semiauto._ import io.circe.generic.semiauto.deriveDecoder import io.circe.{Decoder, Encoder} import scala.concurrent.Future import scala.util.{Failure, Success} case class Payload( user : User, chatId : Option[ChatId] = None, messageId : Option[Int] = None, inlineMessageId : Option[String] = None, gameManagerHost : String, gameShortName : String) { def toGetGameHighScores = GetGameHighScores(user.id, chatId, messageId, inlineMessageId) def base64Encode: String = { val payloadJson = marshalling.toJson[Payload](this) val encodedPayload = Base64.getEncoder.encodeToString( payloadJson.getBytes(StandardCharsets.UTF_8)) encodedPayload } } object Payload { def base64Decode(encodedPayload: String): Payload = { val base64payload = URLDecoder.decode(encodedPayload, "UTF-8") val jsonPayload = new String(Base64.getDecoder.decode(base64payload), StandardCharsets.UTF_8) val payload = marshalling.fromJson[Payload](jsonPayload) payload } def forCallbackQuery(gameManagerHost: String)(implicit cbq: CallbackQuery): Payload = { Payload( cbq.from, cbq.message.map(_.source), cbq.message.map(_.messageId), cbq.inlineMessageId, gameManagerHost, cbq.gameShortName.get) // throws if not a game callback } import marshalling._ implicit val payloadEncoder: Encoder[Payload] = deriveEncoder[Payload] implicit val payloadDecoder: Decoder[Payload] = deriveDecoder[Payload] }
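Payload.base64Decode applies URL-decoding first and Base64-decoding second, because the callback payload travels inside a URL. A minimal sketch of that ordering with a made-up JSON payload:

import java.net.{URLDecoder, URLEncoder}
import java.nio.charset.StandardCharsets
import java.util.Base64

object PayloadCodecSketch {
  // Undo the two layers in reverse order: URL-decode, then Base64-decode.
  def decode(encoded: String): String = {
    val base64 = URLDecoder.decode(encoded, "UTF-8")
    new String(Base64.getDecoder.decode(base64), StandardCharsets.UTF_8)
  }

  def main(args: Array[String]): Unit = {
    val json = """{"gameShortName":"demo"}"""
    val wire = URLEncoder.encode(
      Base64.getEncoder.encodeToString(json.getBytes(StandardCharsets.UTF_8)), "UTF-8")
    assert(decode(wire) == json) // round-trips
  }
}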
Example 33
Source File: DemoFileDownloadServlet.scala From udash-demos with GNU General Public License v3.0 | 5 votes |
package io.udash.demos.files.jetty import java.io.File import java.net.URLDecoder import java.nio.charset.StandardCharsets import javax.servlet.http.HttpServletRequest import io.udash.demos.files.services.FilesStorage import io.udash.rpc.utils.FileDownloadServlet class DemoFileDownloadServlet(filesDir: String, contextPrefix: String) extends FileDownloadServlet { override protected def resolveFile(request: HttpServletRequest): File = { val name = URLDecoder.decode(request.getRequestURI.stripPrefix(contextPrefix + "/"), StandardCharsets.UTF_8.name()) new File(filesDir, name) } override protected def presentedFileName(name: String): String = FilesStorage.allFiles .find(_.serverFileName == name) .map(_.name) .getOrElse(name) }
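resolveFile strips the servlet prefix and then URL-decodes the remainder of the request URI to get the stored file name. A sketch of that step with an illustrative prefix and path:

import java.net.URLDecoder
import java.nio.charset.StandardCharsets

object RequestUriToFileNameSketch {
  // Remove the context prefix first, then decode the remaining path segment.
  def fileName(requestUri: String, contextPrefix: String): String =
    URLDecoder.decode(
      requestUri.stripPrefix(contextPrefix + "/"),
      StandardCharsets.UTF_8.name())

  def main(args: Array[String]): Unit = {
    println(fileName("/files/my%20report.pdf", "/files")) // prints my report.pdf
  }
}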
Example 34
Source File: HistoryNotFoundPage.scala From SparkCore with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.master.ui import java.net.URLDecoder import javax.servlet.http.HttpServletRequest import scala.xml.Node import org.apache.spark.ui.{UIUtils, WebUIPage} private[spark] class HistoryNotFoundPage(parent: MasterWebUI) extends WebUIPage("history/not-found") { def render(request: HttpServletRequest): Seq[Node] = { val titleParam = request.getParameter("title") val msgParam = request.getParameter("msg") val exceptionParam = request.getParameter("exception") // If no parameters are specified, assume the user did not enable event logging val defaultTitle = "Event logging is not enabled" val defaultContent = <div class="row-fluid"> <div class="span12" style="font-size:14px"> No event logs were found for this application! To <a href="http://spark.apache.org/docs/latest/monitoring.html">enable event logging</a>, set <span style="font-style:italic">spark.eventLog.enabled</span> to true and <span style="font-style:italic">spark.eventLog.dir</span> to the directory to which your event logs are written. </div> </div> val title = Option(titleParam).getOrElse(defaultTitle) val content = Option(msgParam) .map { msg => URLDecoder.decode(msg, "UTF-8") } .map { msg => <div class="row-fluid"> <div class="span12" style="font-size:14px">{msg}</div> </div> ++ Option(exceptionParam) .map { e => URLDecoder.decode(e, "UTF-8") } .map { e => <pre>{e}</pre> } .getOrElse(Seq.empty) }.getOrElse(defaultContent) UIUtils.basicSparkPage(content, title) } }
Example 35
Source File: ExecutorThreadDumpPage.scala From SparkCore with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.exec import java.net.URLDecoder import javax.servlet.http.HttpServletRequest import scala.util.Try import scala.xml.{Text, Node} import org.apache.spark.ui.{UIUtils, WebUIPage} private[ui] class ExecutorThreadDumpPage(parent: ExecutorsTab) extends WebUIPage("threadDump") { private val sc = parent.sc def render(request: HttpServletRequest): Seq[Node] = { val executorId = Option(request.getParameter("executorId")).map { executorId => // Due to YARN-2844, "<driver>" in the url will be encoded to "%25253Cdriver%25253E" when // running in yarn-cluster mode. `request.getParameter("executorId")` will return // "%253Cdriver%253E". Therefore we need to decode it until we get the real id. var id = executorId var decodedId = URLDecoder.decode(id, "UTF-8") while (id != decodedId) { id = decodedId decodedId = URLDecoder.decode(id, "UTF-8") } id }.getOrElse { throw new IllegalArgumentException(s"Missing executorId parameter") } val time = System.currentTimeMillis() val maybeThreadDump = sc.get.getExecutorThreadDump(executorId) val content = maybeThreadDump.map { threadDump => val dumpRows = threadDump.map { thread => <div class="accordion-group"> <div class="accordion-heading" onclick="$(this).next().toggleClass('hidden')"> <a class="accordion-toggle"> Thread {thread.threadId}: {thread.threadName} ({thread.threadState}) </a> </div> <div class="accordion-body hidden"> <div class="accordion-inner"> <pre>{thread.stackTrace}</pre> </div> </div> </div> } <div class="row-fluid"> <p>Updated at {UIUtils.formatDate(time)}</p> { // scalastyle:off <p><a class="expandbutton" onClick="$('.accordion-body').removeClass('hidden'); $('.expandbutton').toggleClass('hidden')"> Expand All </a></p> <p><a class="expandbutton hidden" onClick="$('.accordion-body').addClass('hidden'); $('.expandbutton').toggleClass('hidden')"> Collapse All </a></p> // scalastyle:on } <div class="accordion">{dumpRows}</div> </div> }.getOrElse(Text("Error fetching thread dump")) UIUtils.headerSparkPage(s"Thread dump for executor $executorId", content, parent) } }
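The executorId handling decodes repeatedly until the value stops changing, to undo double or triple encoding. The same fixpoint loop in isolation:

import java.net.URLDecoder
import scala.annotation.tailrec

object DecodeUntilStableSketch {
  // Keep decoding until a pass leaves the string unchanged, e.g. for a
  // multiply encoded id such as "%25253Cdriver%25253E".
  @tailrec
  def fullyDecode(s: String): String = {
    val once = URLDecoder.decode(s, "UTF-8")
    if (once == s) s else fullyDecode(once)
  }

  def main(args: Array[String]): Unit = {
    println(fullyDecode("%25253Cdriver%25253E")) // prints <driver>
  }
}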
Example 36
Source File: ActivateAccountController.scala From silhouette-vuejs-app with Apache License 2.0 | 5 votes |
package controllers import java.net.URLDecoder import java.util.UUID import javax.inject.Inject import com.mohiva.play.silhouette.api._ import com.mohiva.play.silhouette.impl.providers.CredentialsProvider import models.services.{AuthTokenService, MailService, UserService} import play.api.mvc.{AbstractController, ControllerComponents} import utils.auth.DefaultEnv import scala.concurrent.{ExecutionContext, Future} def activate(token: UUID) = silhouette.UnsecuredAction.async { authTokenService.validate(token).flatMap { case Some(authToken) => userService.retrieveUserLoginInfo(authToken.userID, CredentialsProvider.ID).flatMap { case Some((user, _)) => userService.setEmailActivated(user).map { _ => Redirect("/signin?message=emailVerified") } case _ => Future.successful(Redirect("/error?message=activationTokenInvalid")) } case None => Future.successful(Redirect("/error?message=activationTokenInvalid")) } } }
Example 37
Source File: ActivateAccountController.scala From play-silhouette-4.0-slick-postgres-seed with Apache License 2.0 | 5 votes |
package controllers.auth import java.net.URLDecoder import java.util.UUID import javax.inject.Inject import com.mohiva.play.silhouette.api._ import com.mohiva.play.silhouette.impl.providers.CredentialsProvider import controllers.{ WebJarAssets, auth } import models.services.{ AuthTokenService, UserService } import play.api.i18n.{ I18nSupport, Messages, MessagesApi } import play.api.libs.concurrent.Execution.Implicits._ import play.api.libs.mailer.{ Email, MailerClient } import play.api.mvc.{ Action, AnyContent, Controller } import utils.auth.DefaultEnv import scala.concurrent.Future import scala.language.postfixOps def activate(token: UUID): Action[AnyContent] = silhouette.UnsecuredAction.async { implicit request => authTokenService.validate(token).flatMap { case Some(authToken) => userService.retrieve(authToken.userID).flatMap { case Some(user) if user.loginInfo.providerID == CredentialsProvider.ID => userService.save(user.copy(activated = true)).map { _ => Redirect(auth.routes.SignInController.view()).flashing("success" -> Messages("account.activated")) } case _ => Future.successful(Redirect(auth.routes.SignInController.view()).flashing("error" -> Messages("invalid.activation.link"))) } case None => Future.successful(Redirect(auth.routes.SignInController.view()).flashing("error" -> Messages("invalid.activation.link"))) } } }
Example 38
Source File: TileRequest.scala From franklin with Apache License 2.0 | 5 votes |
package com.azavea.franklin.datamodel import eu.timepit.refined.types.numeric.NonNegInt import eu.timepit.refined.types.string.NonEmptyString import java.net.URLDecoder import java.nio.charset.StandardCharsets sealed trait TileMatrixRequest { val z: Int val x: Int val y: Int val collection: String def urlDecode(rawString: String): String = URLDecoder.decode(rawString, StandardCharsets.UTF_8.toString) } case class ItemRasterTileRequest( collectionRaw: String, itemRaw: String, z: Int, x: Int, y: Int, asset: String, redBandOption: Option[Int], greenBandOption: Option[Int], blueBandOption: Option[Int], upperQuantileOption: Option[Quantile], lowerQuantileOption: Option[Quantile], singleBand: Option[NonNegInt] ) extends TileMatrixRequest { val collection = urlDecode(collectionRaw) val item = urlDecode(itemRaw) val redBand = redBandOption.getOrElse(0) val greenBand = greenBandOption.getOrElse(1) val blueBand = blueBandOption.getOrElse(2) val bands = Seq(redBand, greenBand, blueBand) // Because lists are 0 indexed and humans are 1 indexed we need to adjust val upperQuantile = upperQuantileOption.map(_.value).getOrElse(100) - 1 val lowerQuantile = lowerQuantileOption.map(_.value).getOrElse(-1) + 1 val zxy = (z, x, y) } case class MapboxVectorTileFootprintRequest( collectionRaw: String, z: Int, x: Int, y: Int, colorField: NonEmptyString ) extends TileMatrixRequest { val collection = urlDecode(collectionRaw) }
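TileMatrixRequest.urlDecode spells the charset via StandardCharsets rather than a bare string literal. The same call in isolation, with a made-up path segment:

import java.net.URLDecoder
import java.nio.charset.StandardCharsets

object TilePathSketch {
  // StandardCharsets.UTF_8.toString is simply "UTF-8", but it avoids a
  // hand-typed charset name.
  def urlDecode(raw: String): String =
    URLDecoder.decode(raw, StandardCharsets.UTF_8.toString)

  def main(args: Array[String]): Unit = {
    println(urlDecode("landsat%208%2Fcollection")) // prints landsat 8/collection
  }
}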
Example 39
Source File: UrlEncodedFormBody.scala From fintrospect with Apache License 2.0 | 5 votes |
package io.fintrospect.parameters import java.net.{URLDecoder, URLEncoder} import com.twitter.finagle.http._ import io.fintrospect.ContentTypes.APPLICATION_FORM_URLENCODED import io.fintrospect.util.{Extraction, ExtractionError, ExtractionFailed, Extractor} import scala.util.{Failure, Success, Try} case class UrlEncodedFormBody(formContents: Seq[FormField[_] with Extractor[Form, _]], validator: FormValidator, extractor: FormFieldExtractor) extends Body[Form] { override val contentType = APPLICATION_FORM_URLENCODED override def iterator = formContents.iterator private def decodeFields(content: String): Map[String, Seq[String]] = { content .split("&") .filter(_.contains("=")) .map(nvp => { val parts = nvp.split("=") (URLDecoder.decode(parts(0), "UTF-8"), if (parts.length > 1) URLDecoder.decode(parts(1), "UTF-8") else "") }) .groupBy(_._1) .mapValues(_.map(_._2)) } private def encode(form: Form): String = form.fields.flatMap { case (name, values) => values.map(value => URLEncoder.encode(name, "UTF-8") + "=" + URLEncoder.encode(value, "UTF-8")) }.mkString("&") override def -->(value: Form): Seq[RequestBinding] = Seq(new RequestBinding(null, req => { val contentString = encode(value) req.headerMap.add("Content-type", contentType.value) req.headerMap.add("Content-length", contentString.length.toString) req.contentString = contentString req })) ++ formContents.map(f => new FormFieldBinding(f, "")) override def <--?(message: Message): Extraction[Form] = Try(validator(formContents, new Form(decodeFields(message.contentString), Map.empty, Nil))) match { case Success(form) => extractor(formContents, form) case Failure(_) => ExtractionFailed(formContents.filter(_.required).map(param => ExtractionError(param, "Could not parse"))) } }
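decodeFields splits the form body on "&" and "=" and URL-decodes both names and values, collecting repeated names into a multimap. A self-contained sketch of that parsing, with made-up field data:

import java.net.URLDecoder

object FormDecodeSketch {
  // Decode an application/x-www-form-urlencoded body into name -> values.
  def decodeFields(content: String): Map[String, Seq[String]] =
    content
      .split("&")
      .filter(_.contains("="))
      .map { nvp =>
        val parts = nvp.split("=", 2)
        URLDecoder.decode(parts(0), "UTF-8") ->
          (if (parts.length > 1) URLDecoder.decode(parts(1), "UTF-8") else "")
      }
      .groupBy(_._1)
      .map { case (k, vs) => k -> vs.map(_._2).toSeq }

  def main(args: Array[String]): Unit = {
    // "name" decodes to Jürgen; "tag" keeps both values
    println(decodeFields("name=J%C3%BCrgen&tag=a&tag=b"))
  }
}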
Example 40
Source File: SchemaFileLocatable.scala From incubator-daffodil with Apache License 2.0 | 5 votes |
package org.apache.daffodil.exceptions import java.net.URLDecoder import org.apache.daffodil.api.LocationInSchemaFile import org.apache.daffodil.schema.annotation.props.LookupLocation import org.apache.daffodil.util.TransientParam trait HasSchemaFileLocation extends LookupLocation { override def schemaFileLocation: SchemaFileLocation override def lineDescription: String = schemaFileLocation.lineDescription override def columnDescription: String = schemaFileLocation.columnDescription override def fileDescription: String = schemaFileLocation.fileDescription override def locationDescription: String = schemaFileLocation.locationDescription } class SchemaFileLocation(@TransientParam context: SchemaFileLocatable) extends LocationInSchemaFile with Serializable { val lineNumber = context.lineNumber val columnNumber = context.columnNumber val uriString: String = context.uriString override def lineDescription = lineNumber match { case Some(num) => " line " + num case None => "" } override def columnDescription = columnNumber match { case Some(num) => " column " + num case None => "" } override val toString = context.toString val diagnosticDebugName: String = context.diagnosticDebugName override def fileDescription = " in " + URLDecoder.decode(uriString, "UTF-8") override def locationDescription = { val showInfo = lineDescription != "" || fileDescription != "" val info = lineDescription + columnDescription + fileDescription val txt = if (showInfo) "Location" + info else "" txt } } trait SchemaFileLocatable extends LocationInSchemaFile with HasSchemaFileLocation { def lineAttribute: Option[String] def columnAttribute: Option[String] def fileAttribute: Option[String] def diagnosticDebugName: String lazy val lineNumber: Option[String] = lineAttribute match { case Some(seqNodes) => Some(seqNodes.toString) case None => None } override lazy val lineDescription = lineNumber match { case Some(num) => " line " + num case None => "" } lazy val columnNumber = columnAttribute match { case Some(seqNodes) => Some(seqNodes.toString) case None => None } override lazy val columnDescription = columnNumber match { case Some(num) => " column " + num case None => "" } // URLDecoder removes %20, etc from the file name. override lazy val fileDescription = " in " + URLDecoder.decode(uriString, "UTF-8") override lazy val locationDescription = { val showInfo = lineDescription != "" || fileDescription != "" val info = lineDescription + columnDescription + fileDescription val txt = if (showInfo) "Location" + info else "" txt } def uriString: String lazy val uriStringFromAttribute = { fileAttribute match { case Some(seqNodes) => Some(seqNodes.toString) case None => None } } override lazy val schemaFileLocation = new SchemaFileLocation(this) }
Example 41
Source File: RepairFromMessages.scala From nexus with Apache License 2.0 | 5 votes |
package ch.epfl.bluebrain.nexus.kg import java.net.URLDecoder import java.util.UUID import akka.actor.ActorSystem import akka.persistence.cassandra.query.scaladsl.CassandraReadJournal import akka.persistence.query.PersistenceQuery import ch.epfl.bluebrain.nexus.kg.resources.{Id, Repo, ResId} import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef import ch.epfl.bluebrain.nexus.rdf.Iri import com.typesafe.scalalogging.Logger import monix.eval.Task import monix.execution.Scheduler import monix.execution.schedulers.CanBlock import scala.concurrent.Future import scala.util.Try object RepairFromMessages { // $COVERAGE-OFF$ private val log = Logger[RepairFromMessages.type] def repair(repo: Repo[Task])(implicit as: ActorSystem, sc: Scheduler, pm: CanBlock): Unit = { log.info("Repairing dependent tables from messages.") val pq = PersistenceQuery(as).readJournalFor[CassandraReadJournal](CassandraReadJournal.Identifier) Task .fromFuture { pq.currentPersistenceIds() .mapAsync(1) { case ResourceId(id) => (repo.get(id, None).value >> Task.unit).runToFuture case other => log.warn(s"Unknown persistence id '$other'") Future.successful(()) } .runFold(0) { case (acc, _) => if (acc % 1000 == 0) log.info(s"Processed '$acc' persistence ids.") acc + 1 } .map(_ => ()) } .runSyncUnsafe() log.info("Finished repairing dependent tables from messages.") } object ResourceId { private val regex = "^resources\\-([0-9a-fA-F]{8}\\-[0-9a-fA-F]{4}\\-[0-9a-fA-F]{4}\\-[0-9a-fA-F]{4}\\-[0-9a-fA-F]{12})\\-(.+)$".r def unapply(arg: String): Option[ResId] = arg match { case regex(stringUuid, stringId) => for { uuid <- Try(UUID.fromString(stringUuid)).toOption iri <- Iri.absolute(URLDecoder.decode(stringId, "UTF-8")).toOption } yield Id(ProjectRef(uuid), iri) case _ => None } } // $COVERAGE-ON$ }
Example 42
Source File: RepairFromMessages.scala From nexus with Apache License 2.0 | 5 votes |
package ch.epfl.bluebrain.nexus.iam import java.net.URLDecoder import akka.actor.ActorSystem import akka.persistence.cassandra.query.scaladsl.CassandraReadJournal import akka.persistence.query.PersistenceQuery import ch.epfl.bluebrain.nexus.iam.acls.Acls import ch.epfl.bluebrain.nexus.iam.permissions.Permissions import ch.epfl.bluebrain.nexus.iam.realms.Realms import ch.epfl.bluebrain.nexus.iam.types.Label import ch.epfl.bluebrain.nexus.rdf.Iri.Path import com.typesafe.scalalogging.Logger import monix.eval.Task import monix.execution.Scheduler import monix.execution.schedulers.CanBlock import scala.concurrent.Future object RepairFromMessages { // $COVERAGE-OFF$ private val log = Logger[RepairFromMessages.type] def repair( p: Permissions[Task], r: Realms[Task], a: Acls[Task] )(implicit as: ActorSystem, sc: Scheduler, pm: CanBlock): Unit = { val pq = PersistenceQuery(as).readJournalFor[CassandraReadJournal](CassandraReadJournal.Identifier) pq.currentPersistenceIds() .mapAsync(1) { case PermissionsId() => p.agg.currentState(p.persistenceId).runToFuture case RealmId(label) => r.agg.currentState(label.value).runToFuture case AclId(path) => a.agg.currentState(path.asString).runToFuture case other => log.warn(s"Unknown persistence id '$other'") Future.successful(()) } .runFold(0) { case (acc, _) => if (acc % 100 == 0) log.info(s"Processed '$acc' persistence ids.") acc + 1 } .runSyncDiscard() log.info("Repair from messages table completed.") } sealed abstract class PersistenceId(prefix: String) { private val len = prefix.length protected def dropPrefix(arg: String): Option[String] = if (arg.startsWith(prefix)) Some(arg.drop(len)) else None } object RealmId extends PersistenceId("realms-") { def unapply(arg: String): Option[Label] = dropPrefix(arg).map(Label.unsafe) } object AclId extends PersistenceId("acls-") { def unapply(arg: String): Option[Path] = dropPrefix(arg).flatMap(str => Path(URLDecoder.decode(str, "UTF-8")).toOption) } object PermissionsId { def unapply(arg: String): Boolean = arg == "permissions-permissions" } implicit class RichFuture[A](val future: Future[A]) extends AnyVal { def runSyncDiscard()(implicit s: Scheduler, permit: CanBlock): Unit = Task.fromFuture(future).map(_ => ()).runSyncUnsafe() } // $COVERAGE-ON$ }
Example 43
Source File: Dependencies.scala From coursier with Apache License 2.0 | 5 votes |
package coursier.cli.resolve import java.net.{URL, URLDecoder} import cats.data.{Validated, ValidatedNel} import cats.implicits._ import coursier.core.{Configuration, Dependency, Exclusions, Module, ModuleName, Organization} import coursier.parse.{DependencyParser, JavaOrScalaDependency, JavaOrScalaModule} object Dependencies { def handleDependencies( rawDependencies: Seq[String] ): ValidatedNel[String, List[(JavaOrScalaDependency, Map[String, String])]] = rawDependencies .map { s => DependencyParser.javaOrScalaDependencyParams(s) match { case Left(error) => Validated.invalidNel(error) case Right(d) => Validated.validNel(List(d)) } } .toList .flatSequence def withExtraRepo( rawDependencies: Seq[String], extraDependencies: Seq[(JavaOrScalaDependency, Map[String, String])] ): Either[Throwable, (List[JavaOrScalaDependency], Map[(JavaOrScalaModule, String), URL])] = handleDependencies(rawDependencies) match { case Validated.Valid(l) => val l0 = l ++ extraDependencies val deps = l0.map(_._1) val extraRepo = // Any dependencies with URIs should not be resolved with a pom so this is a // hack to add all the deps with URIs to the FallbackDependenciesRepository // which will be used during the resolve l0.flatMap { case (dep, extraParams) => extraParams.get("url").map { url => (dep.module, dep.version) -> new URL(URLDecoder.decode(url, "UTF-8")) } }.toMap Right((deps, extraRepo)) case Validated.Invalid(err) => Left(new ResolveException( "Error processing dependencies:\n" + err.toList.map(" " + _).mkString("\n") )) } def addExclusions( dep: Dependency, perModuleExclude: Map[Module, Set[Module]], ): Dependency = perModuleExclude.get(dep.module) match { case None => dep case Some(exclusions) => dep.withExclusions( Exclusions.minimize(dep.exclusions ++ exclusions.map(m => (m.organization, m.name))) ) } def addExclusions( deps: Seq[Dependency], perModuleExclude: Map[Module, Set[Module]], ): Seq[Dependency] = deps.map { dep => addExclusions(dep, perModuleExclude) } }
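withExtraRepo URL-decodes the "url" attribute of a dependency before turning it into a java.net.URL for the fallback repository. A sketch of just that conversion, with a made-up artifact URL:

import java.net.{URL, URLDecoder}

object FallbackUrlSketch {
  // The attribute value arrives percent-encoded; decode it before building the URL.
  def toFallbackUrl(encoded: String): URL =
    new URL(URLDecoder.decode(encoded, "UTF-8"))

  def main(args: Array[String]): Unit = {
    // prints https://repo.example.com/my-artifact-1.0.jar (host is made up)
    println(toFallbackUrl("https%3A%2F%2Frepo.example.com%2Fmy-artifact-1.0.jar"))
  }
}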
Example 44
Source File: CmWellConsumeHandler.scala From CM-Well with Apache License 2.0 | 5 votes |
package cmwell.tools.neptune.export import java.net.{URL, URLDecoder, URLEncoder} import java.time.Instant import org.apache.http.client.methods.{CloseableHttpResponse, HttpGet} import org.apache.http.impl.client.DefaultHttpClient import org.apache.http.util.EntityUtils import org.slf4j.LoggerFactory object CmWellConsumeHandler { protected lazy val logger = LoggerFactory.getLogger("cm_well_consumer") val maxRetry = 5 private val sleepTimeout = 10000 def bulkConsume(cluster: String, position: String, format: String, updateMode:Boolean, retryCount:Int= 0): CloseableHttpResponse = { val withMeta = if(updateMode) "&with-meta" else "" val url = "http://" + cluster + "/_bulk-consume?position=" + position + "&format=" + format + withMeta val client = new DefaultHttpClient client.setHttpRequestRetryHandler(new CustomHttpClientRetryHandler()) val get = new HttpGet(url) logger.info("Going to bulk consume,url= " + url) val response = client.execute(get) val statusCode = response.getStatusLine.getStatusCode if (statusCode != 200 && statusCode != 204) { if(statusCode == 503) { logger.error("Failed to bulk consume, error status code=" + statusCode + "response entity=" + EntityUtils.toString(response.getEntity) + ".Going to retry...") Thread.sleep(sleepTimeout) bulkConsume(cluster, position, format, updateMode) } else{ if (retryCount < maxRetry) { logger.error("Failed to bulk consume, error status code=" + statusCode + "response entity=" + EntityUtils.toString(response.getEntity) + ".Going to retry...,retry count=" + retryCount) Thread.sleep(sleepTimeout) bulkConsume(cluster, position, format, updateMode, retryCount + 1) } else { throw new Throwable("Failed to consume from cm-well, error code status=" + statusCode + ", response entity=" + EntityUtils.toString(response.getEntity)) } } } response } def retrivePositionFromCreateConsumer(cluster: String, lengthHint: Int, qp: Option[String], updateMode:Boolean, automaticUpdateMode:Boolean, toolStartTime:Instant, retryCount:Int = 0): String = { val withDeletedParam = if(updateMode || automaticUpdateMode) "&with-deleted" else "" //initial mode val qpTillStartTime = if(!updateMode && !automaticUpdateMode) URLEncoder.encode(",system.lastModified<") + toolStartTime.toString else "" //automatic update mode val qpAfterStartTime = if(!updateMode && automaticUpdateMode) URLEncoder.encode(",system.lastModified>>" )+ toolStartTime.toString else "" val createConsumerUrl = "http://" + cluster + "/?op=create-consumer&qp=-system.parent.parent_hierarchy:/meta/" + qp.getOrElse("") + qpTillStartTime + qpAfterStartTime + "&recursive&length-hint=" + lengthHint + withDeletedParam logger.info("create-consumer-url=" + createConsumerUrl) val get = new HttpGet(createConsumerUrl) val client = new DefaultHttpClient client.setHttpRequestRetryHandler(new CustomHttpClientRetryHandler()) val response = client.execute(get) val res = response.getAllHeaders.find(_.getName == "X-CM-WELL-POSITION").map(_.getValue).getOrElse("") logger.info("create-Consumer http status=" + response.getStatusLine.getStatusCode) val statusCode = response.getStatusLine.getStatusCode if (statusCode != 200) { if(statusCode == 503){ logger.error("Failed to retrieve position via create-consumer api,error status code=" + statusCode + ", response entity=" + EntityUtils.toString(response.getEntity) + ".Going to retry...") Thread.sleep(sleepTimeout) retrivePositionFromCreateConsumer(cluster, lengthHint, qp, updateMode, automaticUpdateMode, toolStartTime) }else { if (retryCount < maxRetry) { logger.error("Failed to retrieve position via create-consumer api,error status code=" + statusCode + ", response entity=" + EntityUtils.toString(response.getEntity) + ".Going to retry..., retry count=" + retryCount) Thread.sleep(sleepTimeout) retrivePositionFromCreateConsumer(cluster, lengthHint, qp, updateMode, automaticUpdateMode, toolStartTime, retryCount+1) } else { throw new Throwable("Failed to consume from cm-well, error code status=" + statusCode + ", response entity=" + EntityUtils.toString(response.getEntity)) } } } res } }
Example 45
Source File: HistoryNotFoundPage.scala From iolap with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.master.ui import java.net.URLDecoder import javax.servlet.http.HttpServletRequest import scala.xml.Node import org.apache.spark.ui.{UIUtils, WebUIPage} private[ui] class HistoryNotFoundPage(parent: MasterWebUI) extends WebUIPage("history/not-found") { def render(request: HttpServletRequest): Seq[Node] = { val titleParam = request.getParameter("title") val msgParam = request.getParameter("msg") val exceptionParam = request.getParameter("exception") // If no parameters are specified, assume the user did not enable event logging val defaultTitle = "Event logging is not enabled" val defaultContent = <div class="row-fluid"> <div class="span12" style="font-size:14px"> No event logs were found for this application! To <a href="http://spark.apache.org/docs/latest/monitoring.html">enable event logging</a>, set <span style="font-style:italic">spark.eventLog.enabled</span> to true and <span style="font-style:italic">spark.eventLog.dir</span> to the directory to which your event logs are written. </div> </div> val title = Option(titleParam).getOrElse(defaultTitle) val content = Option(msgParam) .map { msg => URLDecoder.decode(msg, "UTF-8") } .map { msg => <div class="row-fluid"> <div class="span12" style="font-size:14px">{msg}</div> </div> ++ Option(exceptionParam) .map { e => URLDecoder.decode(e, "UTF-8") } .map { e => <pre>{e}</pre> } .getOrElse(Seq.empty) }.getOrElse(defaultContent) UIUtils.basicSparkPage(content, title) } }
Example 46
Source File: ExecutorThreadDumpPage.scala From iolap with Apache License 2.0 | 5 votes |
package org.apache.spark.ui.exec import java.net.URLDecoder import javax.servlet.http.HttpServletRequest import scala.util.Try import scala.xml.{Text, Node} import org.apache.spark.ui.{UIUtils, WebUIPage} private[ui] class ExecutorThreadDumpPage(parent: ExecutorsTab) extends WebUIPage("threadDump") { private val sc = parent.sc def render(request: HttpServletRequest): Seq[Node] = { val executorId = Option(request.getParameter("executorId")).map { executorId => // Due to YARN-2844, "<driver>" in the url will be encoded to "%25253Cdriver%25253E" when // running in yarn-cluster mode. `request.getParameter("executorId")` will return // "%253Cdriver%253E". Therefore we need to decode it until we get the real id. var id = executorId var decodedId = URLDecoder.decode(id, "UTF-8") while (id != decodedId) { id = decodedId decodedId = URLDecoder.decode(id, "UTF-8") } id }.getOrElse { throw new IllegalArgumentException(s"Missing executorId parameter") } val time = System.currentTimeMillis() val maybeThreadDump = sc.get.getExecutorThreadDump(executorId) val content = maybeThreadDump.map { threadDump => val dumpRows = threadDump.map { thread => <div class="accordion-group"> <div class="accordion-heading" onclick="$(this).next().toggleClass('hidden')"> <a class="accordion-toggle"> Thread {thread.threadId}: {thread.threadName} ({thread.threadState}) </a> </div> <div class="accordion-body hidden"> <div class="accordion-inner"> <pre>{thread.stackTrace}</pre> </div> </div> </div> } <div class="row-fluid"> <p>Updated at {UIUtils.formatDate(time)}</p> { // scalastyle:off <p><a class="expandbutton" onClick="$('.accordion-body').removeClass('hidden'); $('.expandbutton').toggleClass('hidden')"> Expand All </a></p> <p><a class="expandbutton hidden" onClick="$('.accordion-body').addClass('hidden'); $('.expandbutton').toggleClass('hidden')"> Collapse All </a></p> // scalastyle:on } <div class="accordion">{dumpRows}</div> </div> }.getOrElse(Text("Error fetching thread dump")) UIUtils.headerSparkPage(s"Thread dump for executor $executorId", content, parent) } }
Example 47
Source File: JavaInvalidCharacterEscapingTest.scala From guardrail with MIT License | 5 votes |
package core.Dropwizard import com.fasterxml.jackson.annotation.JsonProperty import com.fasterxml.jackson.databind.ObjectMapper import invalidCharacters.client.dropwizard.invalidCharacters.InvalidCharactersClient import invalidCharacters.server.dropwizard.definitions.{InvalidCharacters, InvalidCharactersEnum} import io.netty.buffer.Unpooled import java.net.{SocketAddress, URI, URLDecoder} import java.util.concurrent.{CompletableFuture, CompletionStage} import java.util.function import org.asynchttpclient.Response.ResponseBuilder import org.asynchttpclient.netty.EagerResponseBodyPart import org.asynchttpclient.uri.Uri import org.asynchttpclient.{HttpResponseStatus, Request, Response} import org.scalatest.freespec.AnyFreeSpec import org.scalatest.matchers.must.Matchers import scala.collection.JavaConverters._ object JavaInvalidCharacterEscapingTest { private implicit class RichString(private val s: String) extends AnyVal { def dec: String = URLDecoder.decode(s, "UTF-8") } private object OkStatus extends HttpResponseStatus(Uri.create("http://localhost:1234/foo?foo^bar=query-param")) { override def getStatusCode = 200 override def getStatusText = "OK" override def getProtocolName = "HTTP" override def getProtocolMajorVersion = 1 override def getProtocolMinorVersion = 1 override def getProtocolText = "HTTP/1.1" override def getRemoteAddress: SocketAddress = ??? override def getLocalAddress: SocketAddress = ??? } } class JavaInvalidCharacterEscapingTest extends AnyFreeSpec with Matchers { import JavaInvalidCharacterEscapingTest._ "Invalid characters in Java enums should be escaped" in { InvalidCharactersEnum.NORMAL.getName mustBe "normal" InvalidCharactersEnum.BANG_MOO_COLON_COW_SEMICOLON.getName mustBe "!moo:cow;" InvalidCharactersEnum.POUND_YEAH.getName mustBe "#yeah" InvalidCharactersEnum.WEIRD_AT.getName mustBe "weird@" } "Invalid characters in Java POJO properties should be escaped" in { val invChar = new InvalidCharacters.Builder("stuff", InvalidCharactersEnum.POUND_YEAH).build() invChar.getCloseSquareBraceMoo mustBe "stuff" invChar.getSomeEnumAsteriskCaret mustBe InvalidCharactersEnum.POUND_YEAH classOf[InvalidCharacters].getDeclaredField("closeSquareBraceMoo").getAnnotation(classOf[JsonProperty]).value mustBe "]moo" classOf[InvalidCharacters].getDeclaredField("someEnumAsteriskCaret").getAnnotation(classOf[JsonProperty]).value mustBe "some-enum*^" } "Invalid characters in Java operation param names should be escaped" in { val httpClient = new function.Function[Request, CompletionStage[Response]] { override def apply(request: Request): CompletionStage[Response] = { println(request.getUri) println(request.getQueryParams.asScala.map(_.getName)) val qps = request.getQueryParams.asScala.map(p => (p.getName.dec, p.getValue.dec)) val fps = request.getFormParams.asScala.map(p => (p.getName.dec, p.getValue.dec)) qps.find(_._1 == "foo^bar").map(_._2) mustBe Some("firstarg") fps.find(_._1 == "a*b").map(_._2) mustBe Some("secondarg") fps.find(_._1 == "bc?").map(_._2) mustBe Some("thirdarg") fps.find(_._1 == "d/c").map(_._2) mustBe Some("fourtharg") val response = new ResponseBuilder() response.accumulate(OkStatus) response.accumulate(new EagerResponseBodyPart( Unpooled.copiedBuffer(new ObjectMapper().writeValueAsBytes(new InvalidCharacters.Builder("foo", InvalidCharactersEnum.WEIRD_AT).build())), true )) CompletableFuture.completedFuture(response.build()) } } val client = new InvalidCharactersClient.Builder(new URI("http://localhost:1234")).withHttpClient(httpClient).build() val response = client.getFoo("firstarg", "secondarg", "thirdarg", "fourtharg").call().toCompletableFuture.get() response.fold( { invChar => invChar.getCloseSquareBraceMoo mustBe "foo" invChar.getSomeEnumAsteriskCaret mustBe invalidCharacters.client.dropwizard.definitions.InvalidCharactersEnum.WEIRD_AT } ) } }
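The test reads query and form parameters through a small ".dec" extension method that wraps URLDecoder.decode. The same pattern on its own, with an illustrative input:

import java.net.URLDecoder

object DecExtensionSketch {
  // A tiny value-class extension so decoding reads as "s.dec".
  implicit class RichString(private val s: String) extends AnyVal {
    def dec: String = URLDecoder.decode(s, "UTF-8")
  }

  def main(args: Array[String]): Unit = {
    println("foo%5Ebar%3Dquery+param".dec) // prints foo^bar=query param
  }
}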