scala.collection.JavaConversions.mapAsJavaMap Scala Examples
The following examples show how to use scala.collection.JavaConversions.mapAsJavaMap.
Each example is taken from an open-source project; the source file, project, and license are noted above the code.
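Before the project examples, here is a minimal, self-contained sketch of what mapAsJavaMap does on its own (the map contents are invented for illustration): it wraps a Scala Map in a java.util.Map view, without copying, so the result can be handed to Java APIs.

import scala.collection.JavaConversions.mapAsJavaMap

object MapAsJavaMapSketch {
  def main(args: Array[String]): Unit = {
    val scalaMap = Map("host" -> "localhost", "port" -> "8080")
    // Wraps the Scala map in a java.util.Map view; no copy is made.
    val javaMap: java.util.Map[String, String] = mapAsJavaMap(scalaMap)
    println(javaMap.get("host"))   // prints "localhost"
    println(javaMap.size())        // prints 2
  }
}

Because the wrapper is only a view over an immutable Scala map, it is effectively read-only: calling a mutating method such as put on it throws UnsupportedOperationException.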
Example 1
Source File: DWSHttpClient.scala from Linkis with Apache License 2.0
class DWSHttpClient(clientConfig: DWSClientConfig, clientName: String)
  extends AbstractHttpClient(clientConfig, clientName) {

  override protected def createDiscovery(): Discovery = new DWSGatewayDiscovery

  override protected def prepareAction(requestAction: HttpAction): HttpAction = {
    requestAction match {
      case dwsAction: DWSHttpAction => dwsAction.setDWSVersion(clientConfig.getDWSVersion)
      case _ =>
    }
    requestAction
  }

  override protected def httpResponseToResult(response: HttpResponse,
                                              requestAction: HttpAction,
                                              responseBody: String): Option[Result] = {
    val entity = response.getEntity
    val statusCode: Int = response.getStatusLine.getStatusCode
    val url: String = requestAction.getURL
    val contentType: String = entity.getContentType.getValue
    DWSHttpMessageFactory.getDWSHttpMessageResult(url).map { case DWSHttpMessageResultInfo(_, clazz) =>
      clazz match {
        case c if ClassUtils.isAssignable(c, classOf[DWSResult]) =>
          val dwsResult = clazz.getConstructor().newInstance().asInstanceOf[DWSResult]
          dwsResult.set(responseBody, statusCode, url, contentType)
          BeanUtils.populate(dwsResult, dwsResult.getData)
          return Some(dwsResult)
        case _ =>
      }

      def transfer(value: Result, map: Map[String, Object]): Unit = {
        value match {
          case httpResult: HttpResult =>
            httpResult.set(responseBody, statusCode, url, contentType)
          case _ =>
        }
        val javaMap = mapAsJavaMap(map)
        BeanUtils.populate(value, javaMap)
        fillResultFields(javaMap, value)
      }

      deserializeResponseBody(response) match {
        case map: Map[String, Object] =>
          val value = clazz.getConstructor().newInstance().asInstanceOf[Result]
          transfer(value, map)
          value
        case list: List[Map[String, Object]] =>
          val results = list.map { map =>
            val value = clazz.getConstructor().newInstance().asInstanceOf[Result]
            transfer(value, map)
            value
          }.toArray
          new ListResult(responseBody, results)
      }
    }.orElse(nonDWSResponseToResult(response, requestAction))
  }

  protected def nonDWSResponseToResult(response: HttpResponse, requestAction: HttpAction): Option[Result] = None

  protected def fillResultFields(responseMap: util.Map[String, Object], value: Result): Unit = {}

  // TODO Consistent with workspace, plus expiration time
  override protected def getFsByUser(user: String, path: FsPath): Fs = FSFactory.getFsByProxyUser(path, user)
}

object DWSHttpClient {
  val jacksonJson = new ObjectMapper().setDateFormat(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ"))
}
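The mapAsJavaMap call in this example lives in the transfer helper: the deserialized response map is wrapped as a java.util.Map so Apache Commons BeanUtils.populate can copy its entries onto the result bean. A minimal sketch of that pattern in isolation, with a made-up ExampleResult bean standing in for the Linkis Result classes (assumes commons-beanutils is on the classpath):

import org.apache.commons.beanutils.BeanUtils
import scala.collection.JavaConversions.mapAsJavaMap

// Hypothetical JavaBean-style class, used only for illustration.
class ExampleResult {
  private var message: String = _
  def getMessage: String = message
  def setMessage(m: String): Unit = { message = m }
}

object PopulateFromScalaMap {
  def main(args: Array[String]): Unit = {
    val responseMap: Map[String, Object] = Map("message" -> "success")
    val result = new ExampleResult
    // BeanUtils.populate expects a java.util.Map; the Scala map is wrapped, not copied.
    BeanUtils.populate(result, mapAsJavaMap(responseMap))
    println(result.getMessage)   // prints "success"
  }
}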
Example 2
Source File: LogAnalyticsStreamingQueryListenerSuite.scala from spark-monitoring with MIT License
package org.apache.spark.sql.streaming

import java.util.UUID

import org.apache.spark.listeners.ListenerSuite
import org.apache.spark.sql.streaming.StreamingQueryListener.{QueryProgressEvent, QueryStartedEvent, QueryTerminatedEvent}
import org.scalatest.BeforeAndAfterEach

import scala.collection.JavaConversions.mapAsJavaMap

object LogAnalyticsStreamingQueryListenerSuite {
  val queryStartedEvent = new QueryStartedEvent(UUID.randomUUID, UUID.randomUUID, "name")
  val queryTerminatedEvent = new QueryTerminatedEvent(UUID.randomUUID, UUID.randomUUID, None)
  val queryProgressEvent = new QueryProgressEvent(
    new StreamingQueryProgress(
      UUID.randomUUID,
      UUID.randomUUID,
      null,
      ListenerSuite.EPOCH_TIME_AS_ISO8601,
      2L,
      mapAsJavaMap(Map("total" -> 0L)),
      mapAsJavaMap(Map.empty[String, String]),
      Array(new StateOperatorProgress(0, 1, 2)),
      Array(
        new SourceProgress(
          "source",
          "123",
          "456",
          678,
          Double.NaN,
          Double.NegativeInfinity
        )
      ),
      new SinkProgress("sink")
    )
  )
}

class LogAnalyticsStreamingQueryListenerSuite extends ListenerSuite
  with BeforeAndAfterEach {

  test("should invoke sendToSink for QueryStartedEvent with full class name") {
    val (json, event) = this.onStreamingQueryListenerEvent(
      LogAnalyticsStreamingQueryListenerSuite.queryStartedEvent
    )
    this.assertEvent(json, event)
  }

  test("should invoke sendToSink for QueryTerminatedEvent with full class name") {
    val (json, event) = this.onStreamingQueryListenerEvent(
      LogAnalyticsStreamingQueryListenerSuite.queryTerminatedEvent
    )
    this.assertEvent(json, event)
  }

  test("should invoke sendToSink for QueryProgressEvent with full class name") {
    val (json, event) = this.onStreamingQueryListenerEvent(
      LogAnalyticsStreamingQueryListenerSuite.queryProgressEvent
    )
    this.assertEvent(json, event)
  }

  test("QueryProgressEvent should have expected SparkEventTime") {
    val (json, _) = this.onStreamingQueryListenerEvent(
      LogAnalyticsStreamingQueryListenerSuite.queryProgressEvent
    )
    this.assertSparkEventTime(
      json,
      (_, value) => assert(value.extract[String] === ListenerSuite.EPOCH_TIME_AS_ISO8601)
    )
  }

  test("QueryStartedEvent should have SparkEventTime") {
    val (json, _) = this.onStreamingQueryListenerEvent(
      LogAnalyticsStreamingQueryListenerSuite.queryStartedEvent
    )
    this.assertSparkEventTime(
      json,
      (_, value) => assert(!value.extract[String].isEmpty)
    )
  }

  test("QueryTerminatedEvent should have SparkEventTime") {
    val (json, _) = this.onStreamingQueryListenerEvent(
      LogAnalyticsStreamingQueryListenerSuite.queryTerminatedEvent
    )
    this.assertSparkEventTime(
      json,
      (_, value) => assert(!value.extract[String].isEmpty)
    )
  }
}
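Here mapAsJavaMap supplies the java.util.Map arguments (durationMs and eventTime) that the StreamingQueryProgress constructor expects, so the test fixture needs no hand-built java.util.HashMaps. The shape of that pattern, reduced to a hypothetical class whose constructor takes Java maps:

import scala.collection.JavaConversions.mapAsJavaMap

// Hypothetical stand-in for a Java-facing class with java.util.Map parameters.
class ProgressLike(val durationMs: java.util.Map[String, java.lang.Long],
                   val eventTime: java.util.Map[String, String])

object FixtureSketch {
  def main(args: Array[String]): Unit = {
    val progress = new ProgressLike(
      mapAsJavaMap(Map("total" -> java.lang.Long.valueOf(0L))),
      mapAsJavaMap(Map.empty[String, String])
    )
    println(progress.durationMs.get("total"))   // prints 0
  }
}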
Example 3
Source File: ScalaObjectHandler.scala from fintrospect with Apache License 2.0
package io.fintrospect.templating

import java.io.Writer
import java.lang.reflect.{Field, Method}

import com.github.mustachejava.Iteration
import com.github.mustachejava.reflect.ReflectionObjectHandler

import scala.collection.JavaConversions.mapAsJavaMap
import scala.reflect.ClassTag
import scala.runtime.BoxedUnit

class ScalaObjectHandler extends ReflectionObjectHandler {

  override def checkMethod(member: Method) {}

  override def checkField(member: Field) {}

  override def coerce(value: AnyRef) = value match {
    case m: collection.Map[_, _] => mapAsJavaMap(m)
    case _: BoxedUnit => null
    case Some(some: AnyRef) => coerce(some)
    case None => null
    case _ => value
  }

  override def iterate(iteration: Iteration, writer: Writer, value: AnyRef, scopes: java.util.List[AnyRef]) = value match {
    case TraversableAnyRef(t) => {
      var newWriter = writer
      t foreach (next => newWriter = iteration.next(newWriter, coerce(next), scopes))
      newWriter
    }
    case n: Number => if (n.intValue() == 0) writer else iteration.next(writer, coerce(value), scopes)
    case _ => super.iterate(iteration, writer, value, scopes)
  }

  override def falsey(iteration: Iteration, writer: Writer, value: AnyRef, scopes: java.util.List[AnyRef]) = value match {
    case TraversableAnyRef(t) => if (t.isEmpty) iteration.next(writer, value, scopes) else writer
    case n: Number => if (n.intValue() == 0) iteration.next(writer, coerce(value), scopes) else writer
    case _ => super.falsey(iteration, writer, value, scopes)
  }

  private val TraversableAnyRef = new Def[Traversable[AnyRef]]

  private class Def[C: ClassTag] {
    def unapply[X: ClassTag](x: X): Option[C] = x match {
      case c: C => Some(c)
      case _ => None
    }
  }
}
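The coerce override is where the conversion happens: any Scala Map encountered in a template scope is wrapped with mapAsJavaMap so mustache.java can resolve keys against it, while Option and Unit values are normalised to something mustache.java understands. A minimal rendering sketch with this handler installed (the template text and Greeting class are invented; assumes the mustache.java compiler dependency used by fintrospect):

import java.io.{StringReader, StringWriter}
import com.github.mustachejava.DefaultMustacheFactory

// Hypothetical view class; the relaxed checkMethod/checkField overrides let
// mustache.java read Scala accessors reflectively.
case class Greeting(name: String)

object ScalaObjectHandlerSketch {
  def main(args: Array[String]): Unit = {
    val factory = new DefaultMustacheFactory()
    factory.setObjectHandler(new ScalaObjectHandler)
    val mustache = factory.compile(new StringReader("Hello {{name}}!"), "greeting")
    val writer = new StringWriter()
    mustache.execute(writer, Greeting("world")).flush()
    println(writer.toString)   // prints "Hello world!"
  }
}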
Example 4
Source File: Mappers.scala from common4s with Apache License 2.0
package commons.mapper

import java.sql.ResultSet

import scala.collection.JavaConversions.{ mapAsJavaMap, mapAsScalaMap }
import scala.collection.concurrent.TrieMap

object Mappers {
  private val beanToMapMapperCache = new TrieMap[Class[_], BeanToMapMapper]
  private val mapToBeanMapperCache = new TrieMap[Class[_], MapToBeanMapper]
  private val autoConvertTypeMapToBeanMapperCache = new TrieMap[Class[_], MapToBeanMapper]
  private val resultSetMapperCache = new TrieMap[Class[_], ResultSetMapper]

  def beanToMap(any : AnyRef) : collection.Map[String, Any] = {
    val map = beanToMapMapperCache
      .getOrElseUpdate(any.getClass, BeanToMapMapper.createMapper(any.getClass))
      .map(any)
    mapAsScalaMap(map)
  }

  def mapToBean[T](map : collection.Map[String, Any])(implicit classTag : scala.reflect.ClassTag[T]) : T = {
    mapToBean(map, false)
  }

  def mapToBean[T](map : collection.Map[String, Any], autoConvert : Boolean)(implicit classTag : scala.reflect.ClassTag[T]) : T = {
    val clazz = classTag.runtimeClass
    val mapper =
      if (!autoConvert) mapToBeanMapperCache.getOrElseUpdate(clazz, MapToBeanMapper.createMapper(classTag.runtimeClass))
      else autoConvertTypeMapToBeanMapperCache.getOrElseUpdate(clazz, MapToBeanMapper.createMapper(classTag.runtimeClass, true))

    mapper.map(mapAsJavaMap(map)).asInstanceOf[T]
  }

  def resultSetToBean[T](rs : ResultSet)(implicit classTag : scala.reflect.ClassTag[T]) : T = {
    val clazz = classTag.runtimeClass
    resultSetMapperCache.getOrElseUpdate(clazz, ResultSetMapper.createMapper(clazz)).map(rs).asInstanceOf[T]
  }

  def resultSetToMap(rs : ResultSet) : collection.Map[String, Any] = {
    resultSetToBean[collection.Map[String, Any]](rs)
  }
}
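All four examples rely on scala.collection.JavaConversions, which has been deprecated since Scala 2.12 and removed in 2.13. The explicit mapAsJavaMap and mapAsScalaMap calls map directly onto the decorator style of scala.collection.JavaConverters (or scala.jdk.CollectionConverters on 2.13+). A sketch of the equivalent conversions, assuming Scala 2.12:

import scala.collection.JavaConverters.{mapAsJavaMapConverter, mapAsScalaMapConverter}

object ConvertersSketch {
  def main(args: Array[String]): Unit = {
    // Scala -> Java: .asJava replaces mapAsJavaMap(...)
    val javaMap: java.util.Map[String, Any] = Map[String, Any]("id" -> 1).asJava

    // Java -> Scala: .asScala replaces mapAsScalaMap(...)
    val hashMap = new java.util.HashMap[String, String]()
    hashMap.put("k", "v")
    val scalaMap: scala.collection.mutable.Map[String, String] = hashMap.asScala

    println(javaMap.get("id"))   // prints 1
    println(scalaMap("k"))       // prints v
  }
}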