java.util.concurrent.ConcurrentMap Scala Examples
The following examples show how to use java.util.concurrent.ConcurrentMap.
These examples are drawn from open source projects. You can go to the original project or source file by following the links above each example.
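All of the examples below lean on the same few atomic primitives: putIfAbsent for register-once semantics, computeIfAbsent for get-or-create, and the two-argument remove for conditional eviction. As a quick orientation, here is a minimal, self-contained sketch of those operations; it is not taken from any of the projects below.

import java.util.concurrent.{ConcurrentHashMap, ConcurrentMap}

object ConcurrentMapBasics extends App {
  val cache: ConcurrentMap[String, String] = new ConcurrentHashMap[String, String]()

  // putIfAbsent is atomic: it returns the previous value, or null if the key was free.
  assert(cache.putIfAbsent("a", "first") == null)
  assert(cache.putIfAbsent("a", "second") == "first") // key already taken; value unchanged

  // computeIfAbsent runs the factory at most once per key, under the map's own locking.
  // An explicit java.util.function.Function keeps this compatible with pre-2.12 Scala.
  val b = cache.computeIfAbsent("b", new java.util.function.Function[String, String] {
    override def apply(key: String): String = "built lazily"
  })
  assert(b == "built lazily")

  // The two-argument remove only succeeds if the key still maps to the given value.
  assert(!cache.remove("a", "second"))
  assert(cache.remove("a", "first"))
}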
Example 1
Source File: ParameterizedType.scala From intellij-lsp with Apache License 2.0
package org.jetbrains.plugins.scala.lang.psi.types.api

import java.util.concurrent.ConcurrentMap

import com.intellij.util.containers.ContainerUtil
import org.jetbrains.plugins.scala.extensions.TraversableExt
import org.jetbrains.plugins.scala.lang.psi.types.api.ParameterizedType.substitutorCache
import org.jetbrains.plugins.scala.lang.psi.types.recursiveUpdate.Update
import org.jetbrains.plugins.scala.lang.psi.types.{ScSubstitutor, ScType}
import org.jetbrains.plugins.scala.project.ProjectContext

trait ParameterizedType extends ValueType {

  override implicit def projectContext: ProjectContext = designator.projectContext

  val designator: ScType
  val typeArguments: Seq[ScType]

  def substitutor: ScSubstitutor = Option(substitutorCache.get(this)).getOrElse {
    val result = substitutorInner
    substitutorCache.put(this, result)
    result
  }

  protected def substitutorInner: ScSubstitutor

  override def removeAbstracts =
    ParameterizedType(designator.removeAbstracts, typeArguments.map(_.removeAbstracts))

  override def updateSubtypes(update: Update, visited: Set[ScType]): ValueType = {
    ParameterizedType(
      designator.recursiveUpdateImpl(update, visited),
      typeArguments.map(_.recursiveUpdateImpl(update, visited))
    )
  }

  override def typeDepth: Int = {
    val result = designator.typeDepth
    typeArguments.map(_.typeDepth) match {
      case Seq() => result //todo: shouldn't be possible
      case seq => result.max(seq.max + 1)
    }
  }

  override def isFinalType: Boolean =
    designator.isFinalType &&
      typeArguments.filterBy(classOf[TypeParameterType]).forall(_.isInvariant)
}

object ParameterizedType {
  val substitutorCache: ConcurrentMap[ParameterizedType, ScSubstitutor] =
    ContainerUtil.createConcurrentWeakMap[ParameterizedType, ScSubstitutor]()

  def apply(designator: ScType, typeArguments: Seq[ScType]): ValueType =
    designator.typeSystem.parameterizedType(designator, typeArguments)

  def unapply(parameterized: ParameterizedType): Option[(ScType, Seq[ScType])] =
    Some(parameterized.designator, parameterized.typeArguments)
}
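The substitutorCache above is a concurrent map with weak keys (ContainerUtil.createConcurrentWeakMap is an IntelliJ platform utility), so a memoized substitutor is dropped once its ParameterizedType instance becomes unreachable. Outside the IntelliJ platform, Guava's MapMaker builds an analogous structure; a minimal sketch, not part of the project above:

import java.util.concurrent.ConcurrentMap

import com.google.common.collect.MapMaker

object WeakMemoDemo extends App {
  // Weak keys: an entry disappears once nothing else references its key object.
  // Caveat: weak-key MapMaker maps compare keys by identity rather than equals().
  val memo: ConcurrentMap[AnyRef, String] = new MapMaker().weakKeys().makeMap[AnyRef, String]()

  val key = new Object
  memo.put(key, "computed once")
  assert(memo.get(key) == "computed once")
}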
Example 2
Source File: CachedPulsarClientSuite.scala From pulsar-spark with Apache License 2.0
package org.apache.spark.sql.pulsar

import java.util.concurrent.ConcurrentMap
import java.{util => ju}

import org.scalatest.PrivateMethodTester

import org.apache.pulsar.client.api.PulsarClient
import org.apache.spark.sql.test.SharedSQLContext

class CachedPulsarClientSuite extends SharedSQLContext with PrivateMethodTester with PulsarTest {

  import PulsarOptions._

  type KP = PulsarClient

  protected override def beforeEach(): Unit = {
    super.beforeEach()
    CachedPulsarClient.clear()
  }

  test("Should return the cached instance on calling getOrCreate with same params.") {
    val pulsarParams = new ju.HashMap[String, Object]()
    // Only the host needs to be resolvable; a running Pulsar server is not required.
    pulsarParams.put(SERVICE_URL_OPTION_KEY, "pulsar://127.0.0.1:6650")
    pulsarParams.put("concurrentLookupRequest", "10000")
    val producer = CachedPulsarClient.getOrCreate(pulsarParams)
    val producer2 = CachedPulsarClient.getOrCreate(pulsarParams)
    assert(producer == producer2)

    val cacheMap = PrivateMethod[ConcurrentMap[Seq[(String, Object)], KP]]('getAsMap)
    val map = CachedPulsarClient.invokePrivate(cacheMap())
    assert(map.size == 1)
  }

  test("Should close the correct pulsar producer for the given pulsarParams.") {
    val pulsarParams = new ju.HashMap[String, Object]()
    pulsarParams.put(SERVICE_URL_OPTION_KEY, "pulsar://127.0.0.1:6650")
    pulsarParams.put("concurrentLookupRequest", "10000")
    val producer: KP = CachedPulsarClient.getOrCreate(pulsarParams)
    pulsarParams.put("concurrentLookupRequest", "20000")
    val producer2: KP = CachedPulsarClient.getOrCreate(pulsarParams)
    // With an updated conf, a new producer instance should be created.
    assert(producer != producer2)

    val cacheMap = PrivateMethod[ConcurrentMap[Seq[(String, Object)], KP]]('getAsMap)
    val map = CachedPulsarClient.invokePrivate(cacheMap())
    assert(map.size == 2)

    CachedPulsarClient.close(pulsarParams)
    val map2 = CachedPulsarClient.invokePrivate(cacheMap())
    assert(map2.size == 1)

    import scala.collection.JavaConverters._
    val (seq: Seq[(String, Object)], _producer: KP) = map2.asScala.toArray.apply(0)
    assert(_producer == producer)
  }
}
Example 3
Source File: CachedPulsarClient.scala From pulsar-spark with Apache License 2.0
package org.apache.spark.sql.pulsar

import java.{util => ju}
import java.util.concurrent.{ConcurrentMap, ExecutionException, TimeUnit}

import scala.collection.JavaConverters._
import scala.util.control.NonFatal

import com.google.common.cache._
import com.google.common.util.concurrent.{ExecutionError, UncheckedExecutionException}

import org.apache.spark.SparkEnv
import org.apache.spark.internal.Logging

private[pulsar] object CachedPulsarClient extends Logging {

  private type Client = org.apache.pulsar.client.api.PulsarClient

  private val defaultCacheExpireTimeout = TimeUnit.MINUTES.toMillis(10)

  private lazy val cacheExpireTimeout: Long =
    Option(SparkEnv.get)
      .map(_.conf
        .getTimeAsMs("spark.pulsar.client.cache.timeout", s"${defaultCacheExpireTimeout}ms"))
      .getOrElse(defaultCacheExpireTimeout)

  private val cacheLoader = new CacheLoader[Seq[(String, Object)], Client] {
    override def load(config: Seq[(String, Object)]): Client = {
      val configMap = config.map(x => x._1 -> x._2).toMap.asJava
      createPulsarClient(configMap)
    }
  }

  private val removalListener = new RemovalListener[Seq[(String, Object)], Client]() {
    override def onRemoval(
        notification: RemovalNotification[Seq[(String, Object)], Client]): Unit = {
      val paramsSeq: Seq[(String, Object)] = notification.getKey
      val client: Client = notification.getValue
      logDebug(
        s"Evicting pulsar producer $client params: $paramsSeq, due to ${notification.getCause}")
      close(paramsSeq, client)
    }
  }

  private lazy val guavaCache: LoadingCache[Seq[(String, Object)], Client] =
    CacheBuilder
      .newBuilder()
      .expireAfterAccess(cacheExpireTimeout, TimeUnit.MILLISECONDS)
      .removalListener(removalListener)
      .build[Seq[(String, Object)], Client](cacheLoader)

  private def createPulsarClient(pulsarConf: ju.Map[String, Object]): Client = {
    val pulsarServiceUrl =
      pulsarConf.get(PulsarOptions.SERVICE_URL_OPTION_KEY).asInstanceOf[String]
    val clientConf = new PulsarConfigUpdater(
      "pulsarClientCache",
      pulsarConf.asScala.toMap,
      PulsarOptions.FILTERED_KEYS
    ).rebuild()
    logInfo(s"Client Conf = $clientConf")
    try {
      val pulsarClient: Client = org.apache.pulsar.client.api.PulsarClient
        .builder()
        .serviceUrl(pulsarServiceUrl)
        .loadConf(clientConf)
        .build()
      logDebug(
        s"Created a new instance of PulsarClient for serviceUrl = $pulsarServiceUrl," +
          s" clientConf = $clientConf.")
      pulsarClient
    } catch {
      case e: Throwable =>
        logError(
          s"Failed to create PulsarClient to serviceUrl $pulsarServiceUrl" +
            s" using client conf $clientConf",
          e)
        throw e
    }
  }

  private def close(paramsSeq: Seq[(String, Object)], client: Client): Unit = {
    try {
      logInfo(s"Closing the Pulsar Client with params: ${paramsSeq.mkString("\n")}.")
      client.close()
    } catch {
      case NonFatal(e) => logWarning("Error while closing pulsar producer.", e)
    }
  }

  private[pulsar] def clear(): Unit = {
    logInfo("Cleaning up guava cache.")
    guavaCache.invalidateAll()
  }

  // Intended for testing purpose only.
  private def getAsMap: ConcurrentMap[Seq[(String, Object)], Client] = guavaCache.asMap()
}
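Structurally, CachedPulsarClient reduces to a Guava LoadingCache whose asMap() view is handed out for tests: the suite above peeks at that live ConcurrentMap to count cached clients. A minimal sketch of the same skeleton, with a plain String standing in for the PulsarClient:

import java.util.concurrent.{ConcurrentMap, TimeUnit}

import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache}

object TinyClientCache {
  // A String "client" stands in for the real PulsarClient.
  private val cache: LoadingCache[String, String] =
    CacheBuilder.newBuilder()
      .expireAfterAccess(10, TimeUnit.MINUTES)
      .build[String, String](new CacheLoader[String, String] {
        override def load(serviceUrl: String): String = s"client-for-$serviceUrl"
      })

  // get() runs the loader at most once per key, even under concurrent callers.
  def getOrCreate(serviceUrl: String): String = cache.get(serviceUrl)

  // asMap() is a live ConcurrentMap view of the cache: the same kind of view
  // the test suites above reach through the private getAsMap method.
  def getAsMap: ConcurrentMap[String, String] = cache.asMap()
}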
Example 4
Source File: ShuffleTestAccessor.scala From BigDatalog with Apache License 2.0
package org.apache.spark.network.shuffle

import java.io.{IOException, File}
import java.util.concurrent.ConcurrentMap

import org.apache.hadoop.yarn.api.records.ApplicationId
import org.fusesource.leveldbjni.JniDBFactory
import org.iq80.leveldb.{DB, Options}

import org.apache.spark.network.shuffle.ExternalShuffleBlockResolver.AppExecId
import org.apache.spark.network.shuffle.protocol.ExecutorShuffleInfo

object ShuffleTestAccessor {

  def getBlockResolver(handler: ExternalShuffleBlockHandler): ExternalShuffleBlockResolver = {
    handler.blockManager
  }

  def getExecutorInfo(
      appId: ApplicationId,
      execId: String,
      resolver: ExternalShuffleBlockResolver): Option[ExecutorShuffleInfo] = {
    val id = new AppExecId(appId.toString, execId)
    Option(resolver.executors.get(id))
  }

  def registeredExecutorFile(resolver: ExternalShuffleBlockResolver): File = {
    resolver.registeredExecutorFile
  }

  def shuffleServiceLevelDB(resolver: ExternalShuffleBlockResolver): DB = {
    resolver.db
  }

  def reloadRegisteredExecutors(
      file: File): ConcurrentMap[ExternalShuffleBlockResolver.AppExecId, ExecutorShuffleInfo] = {
    val options: Options = new Options
    options.createIfMissing(true)
    val factory = new JniDBFactory
    val db = factory.open(file, options)
    val result = ExternalShuffleBlockResolver.reloadRegisteredExecutors(db)
    db.close()
    result
  }

  def reloadRegisteredExecutors(
      db: DB): ConcurrentMap[ExternalShuffleBlockResolver.AppExecId, ExecutorShuffleInfo] = {
    ExternalShuffleBlockResolver.reloadRegisteredExecutors(db)
  }
}
Example 5
Source File: OuterScopes.scala From Spark-2.3.1 with Apache License 2.0
package org.apache.spark.sql.catalyst.encoders

import java.util.concurrent.ConcurrentMap

import com.google.common.collect.MapMaker

import org.apache.spark.util.Utils

object OuterScopes {
  @transient
  lazy val outerScopes: ConcurrentMap[String, AnyRef] =
    new MapMaker().weakValues().makeMap()

  def getOuterScope(innerCls: Class[_]): () => AnyRef = {
    assert(innerCls.isMemberClass)
    val outerClassName = innerCls.getDeclaringClass.getName
    val outer = outerScopes.get(outerClassName)
    if (outer == null) {
      outerClassName match {
        // If the outer class is generated by the REPL, users don't need to register it, as it
        // has only one instance and there is a way to retrieve it: get the `$read` object, call
        // its `INSTANCE()` method to get the single instance of class `$read`, then call the
        // `$iw()` method multiple times to reach the single instance of the innermost `$iw`
        // class.
        case REPLClass(baseClassName) =>
          () => {
            val objClass = Utils.classForName(baseClassName + "$")
            val objInstance = objClass.getField("MODULE$").get(null)
            val baseInstance = objClass.getMethod("INSTANCE").invoke(objInstance)
            val baseClass = Utils.classForName(baseClassName)

            var getter = iwGetter(baseClass)
            var obj = baseInstance
            while (getter != null) {
              obj = getter.invoke(obj)
              getter = iwGetter(getter.getReturnType)
            }

            if (obj == null) {
              throw new RuntimeException(s"Failed to get outer pointer for ${innerCls.getName}")
            }

            outerScopes.putIfAbsent(outerClassName, obj)
            obj
          }
        case _ => null
      }
    } else {
      () => outer
    }
  }

  private def iwGetter(cls: Class[_]) = {
    try {
      cls.getMethod("$iw")
    } catch {
      case _: NoSuchMethodException => null
    }
  }

  // The format of REPL-generated wrapper classes' names, e.g. `$line12.$read$$iw$$iw`
  private[this] val REPLClass = """^(\$line(?:\d+)\.\$read)(?:\$\$iw)+$""".r
}
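Registration is a plain putIfAbsent into the weak-valued map, and getOuterScope then returns a thunk producing the registered instance. A contrived, REPL-free sketch of that round trip (the demo classes are hypothetical, not from Spark):

import org.apache.spark.sql.catalyst.encoders.OuterScopes

object OuterScopesDemo extends App {
  class Outer { case class Inner(i: Int) }

  val outer = new Outer
  // Register the outer instance under its class name. Because the map was built
  // with weakValues(), this registration alone does not keep `outer` alive.
  OuterScopes.outerScopes.putIfAbsent(classOf[Outer].getName, outer)

  // An encoder later asks for a thunk that yields the registered outer instance.
  val getOuter: () => AnyRef = OuterScopes.getOuterScope(classOf[Outer#Inner])
  assert(getOuter() eq outer)
}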
Example 6
Source File: CachedKafkaProducerSuite.scala From Spark-2.3.1 with Apache License 2.0
package org.apache.spark.sql.kafka010

import java.{util => ju}
import java.util.concurrent.ConcurrentMap

import org.apache.kafka.clients.producer.KafkaProducer
import org.apache.kafka.common.serialization.ByteArraySerializer
import org.scalatest.PrivateMethodTester

import org.apache.spark.sql.test.SharedSQLContext

class CachedKafkaProducerSuite extends SharedSQLContext with PrivateMethodTester {

  type KP = KafkaProducer[Array[Byte], Array[Byte]]

  protected override def beforeEach(): Unit = {
    super.beforeEach()
    val clear = PrivateMethod[Unit]('clear)
    CachedKafkaProducer.invokePrivate(clear())
  }

  test("Should return the cached instance on calling getOrCreate with same params.") {
    val kafkaParams = new ju.HashMap[String, Object]()
    kafkaParams.put("acks", "0")
    // Only the host needs to be resolvable; a running Kafka server is not required.
    kafkaParams.put("bootstrap.servers", "127.0.0.1:9022")
    kafkaParams.put("key.serializer", classOf[ByteArraySerializer].getName)
    kafkaParams.put("value.serializer", classOf[ByteArraySerializer].getName)
    val producer = CachedKafkaProducer.getOrCreate(kafkaParams)
    val producer2 = CachedKafkaProducer.getOrCreate(kafkaParams)
    assert(producer == producer2)

    val cacheMap = PrivateMethod[ConcurrentMap[Seq[(String, Object)], KP]]('getAsMap)
    val map = CachedKafkaProducer.invokePrivate(cacheMap())
    assert(map.size == 1)
  }

  test("Should close the correct kafka producer for the given kafkaParams.") {
    val kafkaParams = new ju.HashMap[String, Object]()
    kafkaParams.put("acks", "0")
    kafkaParams.put("bootstrap.servers", "127.0.0.1:9022")
    kafkaParams.put("key.serializer", classOf[ByteArraySerializer].getName)
    kafkaParams.put("value.serializer", classOf[ByteArraySerializer].getName)
    val producer: KP = CachedKafkaProducer.getOrCreate(kafkaParams)
    kafkaParams.put("acks", "1")
    val producer2: KP = CachedKafkaProducer.getOrCreate(kafkaParams)
    // With an updated conf, a new producer instance should be created.
    assert(producer != producer2)

    val cacheMap = PrivateMethod[ConcurrentMap[Seq[(String, Object)], KP]]('getAsMap)
    val map = CachedKafkaProducer.invokePrivate(cacheMap())
    assert(map.size == 2)

    CachedKafkaProducer.close(kafkaParams)
    val map2 = CachedKafkaProducer.invokePrivate(cacheMap())
    assert(map2.size == 1)

    import scala.collection.JavaConverters._
    val (seq: Seq[(String, Object)], _producer: KP) = map2.asScala.toArray.apply(0)
    assert(_producer == producer)
  }
}
Example 7
Source File: CachedKafkaProducer.scala From Spark-2.3.1 with Apache License 2.0
package org.apache.spark.sql.kafka010

import java.{util => ju}
import java.util.concurrent.{ConcurrentMap, ExecutionException, TimeUnit}

import scala.collection.JavaConverters._
import scala.util.control.NonFatal

import com.google.common.cache._
import com.google.common.util.concurrent.{ExecutionError, UncheckedExecutionException}
import org.apache.kafka.clients.producer.KafkaProducer

import org.apache.spark.SparkEnv
import org.apache.spark.internal.Logging

private[kafka010] object CachedKafkaProducer extends Logging {

  private type Producer = KafkaProducer[Array[Byte], Array[Byte]]

  private lazy val cacheExpireTimeout: Long =
    SparkEnv.get.conf.getTimeAsMs("spark.kafka.producer.cache.timeout", "10m")

  private val cacheLoader = new CacheLoader[Seq[(String, Object)], Producer] {
    override def load(config: Seq[(String, Object)]): Producer = {
      val configMap = config.map(x => x._1 -> x._2).toMap.asJava
      createKafkaProducer(configMap)
    }
  }

  private val removalListener = new RemovalListener[Seq[(String, Object)], Producer]() {
    override def onRemoval(
        notification: RemovalNotification[Seq[(String, Object)], Producer]): Unit = {
      val paramsSeq: Seq[(String, Object)] = notification.getKey
      val producer: Producer = notification.getValue
      logDebug(
        s"Evicting kafka producer $producer params: $paramsSeq, due to ${notification.getCause}")
      close(paramsSeq, producer)
    }
  }

  private lazy val guavaCache: LoadingCache[Seq[(String, Object)], Producer] =
    CacheBuilder.newBuilder().expireAfterAccess(cacheExpireTimeout, TimeUnit.MILLISECONDS)
      .removalListener(removalListener)
      .build[Seq[(String, Object)], Producer](cacheLoader)

  private def createKafkaProducer(producerConfiguration: ju.Map[String, Object]): Producer = {
    val kafkaProducer: Producer = new Producer(producerConfiguration)
    logDebug(s"Created a new instance of KafkaProducer for $producerConfiguration.")
    kafkaProducer
  }

  private def close(paramsSeq: Seq[(String, Object)], producer: Producer): Unit = {
    try {
      logInfo(s"Closing the KafkaProducer with params: ${paramsSeq.mkString("\n")}.")
      producer.close()
    } catch {
      case NonFatal(e) => logWarning("Error while closing kafka producer.", e)
    }
  }

  private def clear(): Unit = {
    logInfo("Cleaning up guava cache.")
    guavaCache.invalidateAll()
  }

  // Intended for testing purpose only.
  private def getAsMap: ConcurrentMap[Seq[(String, Object)], Producer] = guavaCache.asMap()
}
Example 8
Source File: ShuffleTestAccessor.scala From Spark-2.3.1 with Apache License 2.0
package org.apache.spark.network.shuffle

import java.io.File
import java.util.concurrent.ConcurrentMap

import org.apache.hadoop.yarn.api.records.ApplicationId
import org.fusesource.leveldbjni.JniDBFactory
import org.iq80.leveldb.{DB, Options}

import org.apache.spark.network.shuffle.ExternalShuffleBlockResolver.AppExecId
import org.apache.spark.network.shuffle.protocol.ExecutorShuffleInfo

object ShuffleTestAccessor {

  def getBlockResolver(handler: ExternalShuffleBlockHandler): ExternalShuffleBlockResolver = {
    handler.blockManager
  }

  def getExecutorInfo(
      appId: ApplicationId,
      execId: String,
      resolver: ExternalShuffleBlockResolver): Option[ExecutorShuffleInfo] = {
    val id = new AppExecId(appId.toString, execId)
    Option(resolver.executors.get(id))
  }

  def registeredExecutorFile(resolver: ExternalShuffleBlockResolver): File = {
    resolver.registeredExecutorFile
  }

  def shuffleServiceLevelDB(resolver: ExternalShuffleBlockResolver): DB = {
    resolver.db
  }

  def reloadRegisteredExecutors(
      file: File): ConcurrentMap[ExternalShuffleBlockResolver.AppExecId, ExecutorShuffleInfo] = {
    val options: Options = new Options
    options.createIfMissing(true)
    val factory = new JniDBFactory
    val db = factory.open(file, options)
    val result = ExternalShuffleBlockResolver.reloadRegisteredExecutors(db)
    db.close()
    result
  }

  def reloadRegisteredExecutors(
      db: DB): ConcurrentMap[ExternalShuffleBlockResolver.AppExecId, ExecutorShuffleInfo] = {
    ExternalShuffleBlockResolver.reloadRegisteredExecutors(db)
  }
}
Example 9
Source File: UserContext.scala From hazelcast-scala with Apache License 2.0
package com.hazelcast.Scala

import java.util.concurrent.ConcurrentMap

object UserContext {
  class Key[T](nameOrNull: String) extends Serializable {
    protected def this() = this(null)
    val name = if (nameOrNull != null) nameOrNull else getClass.getName
  }
}

final class UserContext private[Scala] (private val ctx: ConcurrentMap[String, Object]) extends AnyVal {

  def get[T](key: UserContext.Key[T]): Option[T] =
    Option(ctx.get(key.name).asInstanceOf[T])

  def apply[T](key: UserContext.Key[T]): T =
    ctx.get(key.name).asInstanceOf[T] match {
      case null => sys.error(s"""Key "${key.name}" not found!""")
      case value => value
    }

  def update[T](key: UserContext.Key[T], value: T): Unit = value match {
    case null => ctx.remove(key.name)
    case _ => ctx.put(key.name, value.asInstanceOf[Object])
  }

  def putIfAbsent[T](key: UserContext.Key[T], value: T): Option[T] = value match {
    case null => get(key)
    case _ => Option(ctx.putIfAbsent(key.name, value.asInstanceOf[Object]).asInstanceOf[T])
  }

  def getOrElseUpdate[T](key: UserContext.Key[T], create: => T): T = {
    def getOrUpdate(locked: Boolean): T = {
      get(key) match {
        case Some(value) => value
        case None if locked =>
          val value: T = create
          putIfAbsent(key, value) getOrElse value
        case None =>
          key.name.intern.synchronized(getOrUpdate(locked = true))
      }
    }
    getOrUpdate(locked = false)
  }
}
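UserContext is a zero-allocation value-class wrapper over the ConcurrentMap that Hazelcast exposes per instance; its constructor is private[Scala], so application code only declares typed keys and uses an instance handed out by the library. A hedged sketch of that usage (names hypothetical):

import java.util.concurrent.atomic.AtomicLong

object AppKeys {
  // A typed key; the name falls back to the key class's own name when null is passed.
  val RequestCounter = new UserContext.Key[AtomicLong]("request-counter")
}

// Given a `userContext: UserContext` obtained from the enclosing library:
//   userContext.update(AppKeys.RequestCounter, new AtomicLong(0))
//   userContext(AppKeys.RequestCounter).incrementAndGet() // apply() throws if the key is absent
//   userContext.get(AppKeys.RequestCounter)               // safe variant: Option[AtomicLong]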
Example 10
Source File: ShuffleTestAccessor.scala From multi-tenancy-spark with Apache License 2.0
package org.apache.spark.network.shuffle

import java.io.File
import java.util.concurrent.ConcurrentMap

import org.apache.hadoop.yarn.api.records.ApplicationId
import org.fusesource.leveldbjni.JniDBFactory
import org.iq80.leveldb.{DB, Options}

import org.apache.spark.network.shuffle.ExternalShuffleBlockResolver.AppExecId
import org.apache.spark.network.shuffle.protocol.ExecutorShuffleInfo

object ShuffleTestAccessor {

  def getBlockResolver(handler: ExternalShuffleBlockHandler): ExternalShuffleBlockResolver = {
    handler.blockManager
  }

  def getExecutorInfo(
      appId: ApplicationId,
      execId: String,
      resolver: ExternalShuffleBlockResolver): Option[ExecutorShuffleInfo] = {
    val id = new AppExecId(appId.toString, execId)
    Option(resolver.executors.get(id))
  }

  def registeredExecutorFile(resolver: ExternalShuffleBlockResolver): File = {
    resolver.registeredExecutorFile
  }

  def shuffleServiceLevelDB(resolver: ExternalShuffleBlockResolver): DB = {
    resolver.db
  }

  def reloadRegisteredExecutors(
      file: File): ConcurrentMap[ExternalShuffleBlockResolver.AppExecId, ExecutorShuffleInfo] = {
    val options: Options = new Options
    options.createIfMissing(true)
    val factory = new JniDBFactory
    val db = factory.open(file, options)
    val result = ExternalShuffleBlockResolver.reloadRegisteredExecutors(db)
    db.close()
    result
  }

  def reloadRegisteredExecutors(
      db: DB): ConcurrentMap[ExternalShuffleBlockResolver.AppExecId, ExecutorShuffleInfo] = {
    ExternalShuffleBlockResolver.reloadRegisteredExecutors(db)
  }
}
Example 11
Source File: OuterScopes.scala From multi-tenancy-spark with Apache License 2.0
package org.apache.spark.sql.catalyst.encoders

import java.util.concurrent.ConcurrentMap

import com.google.common.collect.MapMaker

import org.apache.spark.util.Utils

object OuterScopes {
  @transient
  lazy val outerScopes: ConcurrentMap[String, AnyRef] =
    new MapMaker().weakValues().makeMap()

  def getOuterScope(innerCls: Class[_]): () => AnyRef = {
    assert(innerCls.isMemberClass)
    val outerClassName = innerCls.getDeclaringClass.getName
    val outer = outerScopes.get(outerClassName)
    if (outer == null) {
      outerClassName match {
        // If the outer class is generated by the REPL, users don't need to register it, as it
        // has only one instance and there is a way to retrieve it: get the `$read` object, call
        // its `INSTANCE()` method to get the single instance of class `$read`, then call the
        // `$iw()` method multiple times to reach the single instance of the innermost `$iw`
        // class.
        case REPLClass(baseClassName) =>
          () => {
            val objClass = Utils.classForName(baseClassName + "$")
            val objInstance = objClass.getField("MODULE$").get(null)
            val baseInstance = objClass.getMethod("INSTANCE").invoke(objInstance)
            val baseClass = Utils.classForName(baseClassName)

            var getter = iwGetter(baseClass)
            var obj = baseInstance
            while (getter != null) {
              obj = getter.invoke(obj)
              getter = iwGetter(getter.getReturnType)
            }

            if (obj == null) {
              throw new RuntimeException(s"Failed to get outer pointer for ${innerCls.getName}")
            }

            outerScopes.putIfAbsent(outerClassName, obj)
            obj
          }
        case _ => null
      }
    } else {
      () => outer
    }
  }

  private def iwGetter(cls: Class[_]) = {
    try {
      cls.getMethod("$iw")
    } catch {
      case _: NoSuchMethodException => null
    }
  }

  // The format of REPL-generated wrapper classes' names, e.g. `$line12.$read$$iw$$iw`
  private[this] val REPLClass = """^(\$line(?:\d+)\.\$read)(?:\$\$iw)+$""".r
}
Example 12
Source File: OuterScopes.scala From drizzle-spark with Apache License 2.0
package org.apache.spark.sql.catalyst.encoders

import java.util.concurrent.ConcurrentMap

import com.google.common.collect.MapMaker

import org.apache.spark.util.Utils

object OuterScopes {
  @transient
  lazy val outerScopes: ConcurrentMap[String, AnyRef] =
    new MapMaker().weakValues().makeMap()

  def getOuterScope(innerCls: Class[_]): () => AnyRef = {
    assert(innerCls.isMemberClass)
    val outerClassName = innerCls.getDeclaringClass.getName
    val outer = outerScopes.get(outerClassName)
    if (outer == null) {
      outerClassName match {
        // If the outer class is generated by the REPL, users don't need to register it, as it
        // has only one instance and there is a way to retrieve it: get the `$read` object, call
        // its `INSTANCE()` method to get the single instance of class `$read`, then call the
        // `$iw()` method multiple times to reach the single instance of the innermost `$iw`
        // class.
        case REPLClass(baseClassName) =>
          () => {
            val objClass = Utils.classForName(baseClassName + "$")
            val objInstance = objClass.getField("MODULE$").get(null)
            val baseInstance = objClass.getMethod("INSTANCE").invoke(objInstance)
            val baseClass = Utils.classForName(baseClassName)

            var getter = iwGetter(baseClass)
            var obj = baseInstance
            while (getter != null) {
              obj = getter.invoke(obj)
              getter = iwGetter(getter.getReturnType)
            }

            if (obj == null) {
              throw new RuntimeException(s"Failed to get outer pointer for ${innerCls.getName}")
            }

            outerScopes.putIfAbsent(outerClassName, obj)
            obj
          }
        case _ => null
      }
    } else {
      () => outer
    }
  }

  private def iwGetter(cls: Class[_]) = {
    try {
      cls.getMethod("$iw")
    } catch {
      case _: NoSuchMethodException => null
    }
  }

  // The format of REPL-generated wrapper classes' names, e.g. `$line12.$read$$iw$$iw`
  private[this] val REPLClass = """^(\$line(?:\d+)\.\$read)(?:\$\$iw)+$""".r
}
Example 13
Source File: Equivalence.scala From intellij-lsp with Apache License 2.0
package org.jetbrains.plugins.scala.lang.psi.types.api

import java.util.concurrent.ConcurrentMap

import com.intellij.openapi.progress.ProgressManager
import com.intellij.openapi.util.Computable
import com.intellij.util.containers.ContainerUtil
import org.jetbrains.plugins.scala.caches.RecursionManager
import org.jetbrains.plugins.scala.lang.psi.types._

// Excerpt: the enclosing trait declaration was truncated in the original listing;
// `cache` (a ConcurrentMap), `guard` (a RecursionManager guard) and the `eval`
// ThreadLocal are members of that trait.

final def equivInner(left: ScType, right: ScType,
                     substitutor: ScUndefinedSubstitutor = ScUndefinedSubstitutor(),
                     falseUndef: Boolean = true): (Boolean, ScUndefinedSubstitutor) = {
  ProgressManager.checkCanceled()

  if (left == right) return (true, substitutor)

  if (!left.canBeSameClass(right)) return (false, substitutor)

  val key = (left, right, falseUndef)

  val nowEval = eval.get()
  val tuple =
    if (nowEval) null
    else {
      try {
        eval.set(true)
        cache.get(key)
      } finally {
        eval.set(false)
      }
    }

  if (tuple != null) {
    if (substitutor.isEmpty) return tuple
    return tuple.copy(_2 = substitutor + tuple._2)
  }

  if (guard.currentStackContains(key)) {
    return (false, ScUndefinedSubstitutor())
  }

  val result = guard.doPreventingRecursion(
    key, equivComputable(left, right, ScUndefinedSubstitutor(), falseUndef))
  if (result == null) return (false, ScUndefinedSubstitutor())

  if (!nowEval) {
    try {
      eval.set(true)
      cache.put(key, result)
    } finally {
      eval.set(false)
    }
  }

  if (substitutor.isEmpty) return result
  result.copy(_2 = substitutor + result._2)
}

protected def equivComputable(left: ScType, right: ScType,
                              substitutor: ScUndefinedSubstitutor,
                              falseUndef: Boolean): Computable[(Boolean, ScUndefinedSubstitutor)]
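The eval flag above is a per-thread latch around cache reads and writes, presumably so that an equivalence check re-entered from inside the cache operation itself (computing a key's hash or equality can call back into the type system) bypasses the cache instead of recursing. A minimal standalone sketch of that latch pattern, with hypothetical names:

object ReentrancyLatch {
  private val inside = new ThreadLocal[Boolean] {
    override def initialValue(): Boolean = false
  }

  def alreadyInside: Boolean = inside.get()

  // Run `body` with the latch set; nested calls can check `alreadyInside` and bail out.
  def withLatch[T](body: => T): T = {
    inside.set(true)
    try body
    finally inside.set(false)
  }
}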
Example 14
Source File: Conformance.scala From intellij-lsp with Apache License 2.0
package org.jetbrains.plugins.scala.lang.psi.types.api

import java.util.concurrent.ConcurrentMap

import com.intellij.openapi.progress.ProgressManager
import com.intellij.openapi.util.Computable
import com.intellij.psi.PsiClass
import com.intellij.util.containers.ContainerUtil
import org.jetbrains.plugins.scala.caches.RecursionManager
import org.jetbrains.plugins.scala.lang.psi.types._

// Excerpt: the enclosing trait declaration was truncated in the original listing;
// `cache` (a ConcurrentMap), `guard` and the `Result` type alias are members of that trait.

final def conformsInner(left: ScType, right: ScType,
                        visited: Set[PsiClass] = Set.empty,
                        substitutor: ScUndefinedSubstitutor = ScUndefinedSubstitutor(),
                        checkWeak: Boolean = false): Result = {
  ProgressManager.checkCanceled()

  if (left.isAny || right.isNothing || left == right) return (true, substitutor)

  if (!right.canBeSameOrInheritor(left)) return (false, substitutor)

  val key = (left, right, checkWeak)

  val tuple = cache.get(key)
  if (tuple != null) {
    if (substitutor.isEmpty) return tuple
    return tuple.copy(_2 = substitutor + tuple._2)
  }

  if (guard.currentStackContains(key)) {
    return (false, ScUndefinedSubstitutor())
  }

  val res = guard.doPreventingRecursion(key, conformsComputable(left, right, visited, checkWeak))
  if (res == null) return (false, ScUndefinedSubstitutor())

  cache.put(key, res)
  if (substitutor.isEmpty) return res
  res.copy(_2 = substitutor + res._2)
}

def clearCache(): Unit = cache.clear()

protected def conformsComputable(left: ScType, right: ScType,
                                 visited: Set[PsiClass],
                                 checkWeak: Boolean): Computable[(Boolean, ScUndefinedSubstitutor)]
Example 15
Source File: ShuffleTestAccessor.scala From sparkoscope with Apache License 2.0
package org.apache.spark.network.shuffle

import java.io.File
import java.util.concurrent.ConcurrentMap

import org.apache.hadoop.yarn.api.records.ApplicationId
import org.fusesource.leveldbjni.JniDBFactory
import org.iq80.leveldb.{DB, Options}

import org.apache.spark.network.shuffle.ExternalShuffleBlockResolver.AppExecId
import org.apache.spark.network.shuffle.protocol.ExecutorShuffleInfo

object ShuffleTestAccessor {

  def getBlockResolver(handler: ExternalShuffleBlockHandler): ExternalShuffleBlockResolver = {
    handler.blockManager
  }

  def getExecutorInfo(
      appId: ApplicationId,
      execId: String,
      resolver: ExternalShuffleBlockResolver): Option[ExecutorShuffleInfo] = {
    val id = new AppExecId(appId.toString, execId)
    Option(resolver.executors.get(id))
  }

  def registeredExecutorFile(resolver: ExternalShuffleBlockResolver): File = {
    resolver.registeredExecutorFile
  }

  def shuffleServiceLevelDB(resolver: ExternalShuffleBlockResolver): DB = {
    resolver.db
  }

  def reloadRegisteredExecutors(
      file: File): ConcurrentMap[ExternalShuffleBlockResolver.AppExecId, ExecutorShuffleInfo] = {
    val options: Options = new Options
    options.createIfMissing(true)
    val factory = new JniDBFactory
    val db = factory.open(file, options)
    val result = ExternalShuffleBlockResolver.reloadRegisteredExecutors(db)
    db.close()
    result
  }

  def reloadRegisteredExecutors(
      db: DB): ConcurrentMap[ExternalShuffleBlockResolver.AppExecId, ExecutorShuffleInfo] = {
    ExternalShuffleBlockResolver.reloadRegisteredExecutors(db)
  }
}
Example 16
Source File: OuterScopes.scala From sparkoscope with Apache License 2.0
package org.apache.spark.sql.catalyst.encoders

import java.util.concurrent.ConcurrentMap

import com.google.common.collect.MapMaker

import org.apache.spark.util.Utils

object OuterScopes {
  @transient
  lazy val outerScopes: ConcurrentMap[String, AnyRef] =
    new MapMaker().weakValues().makeMap()

  def getOuterScope(innerCls: Class[_]): () => AnyRef = {
    assert(innerCls.isMemberClass)
    val outerClassName = innerCls.getDeclaringClass.getName
    val outer = outerScopes.get(outerClassName)
    if (outer == null) {
      outerClassName match {
        // If the outer class is generated by the REPL, users don't need to register it, as it
        // has only one instance and there is a way to retrieve it: get the `$read` object, call
        // its `INSTANCE()` method to get the single instance of class `$read`, then call the
        // `$iw()` method multiple times to reach the single instance of the innermost `$iw`
        // class.
        case REPLClass(baseClassName) =>
          () => {
            val objClass = Utils.classForName(baseClassName + "$")
            val objInstance = objClass.getField("MODULE$").get(null)
            val baseInstance = objClass.getMethod("INSTANCE").invoke(objInstance)
            val baseClass = Utils.classForName(baseClassName)

            var getter = iwGetter(baseClass)
            var obj = baseInstance
            while (getter != null) {
              obj = getter.invoke(obj)
              getter = iwGetter(getter.getReturnType)
            }

            if (obj == null) {
              throw new RuntimeException(s"Failed to get outer pointer for ${innerCls.getName}")
            }

            outerScopes.putIfAbsent(outerClassName, obj)
            obj
          }
        case _ => null
      }
    } else {
      () => outer
    }
  }

  private def iwGetter(cls: Class[_]) = {
    try {
      cls.getMethod("$iw")
    } catch {
      case _: NoSuchMethodException => null
    }
  }

  // The format of REPL-generated wrapper classes' names, e.g. `$line12.$read$$iw$$iw`
  private[this] val REPLClass = """^(\$line(?:\d+)\.\$read)(?:\$\$iw)+$""".r
}
Example 17
Source File: ConcurrentMapBackedCache.scala From openwhisk with Apache License 2.0
// The original listing omitted the file header; the imports below are reconstructed.
import java.util.concurrent.ConcurrentMap

import scala.concurrent.{ExecutionContext, Future}
import scala.util.control.NonFatal

private class ConcurrentMapBackedCache[V](store: ConcurrentMap[Any, Future[V]]) {
  val cache = this

  def apply(key: Any) = new Keyed(key)

  class Keyed(key: Any) {
    def apply(magnet: => ValueMagnet[V])(implicit ec: ExecutionContext): Future[V] =
      cache.apply(
        key,
        () =>
          try magnet.future
          catch { case NonFatal(e) => Future.failed(e) })
  }

  def apply(key: Any, genValue: () => Future[V])(implicit ec: ExecutionContext): Future[V] = {
    store.computeIfAbsent(
      key,
      new java.util.function.Function[Any, Future[V]]() {
        override def apply(key: Any): Future[V] = {
          val future = genValue()
          future.onComplete { value =>
            // in case of exceptions we remove the cache entry (i.e. try again later)
            if (value.isFailure) store.remove(key, future)
          }
          future
        }
      })
  }

  def remove(key: Any) = Option(store.remove(key))

  def size = store.size
}

class ValueMagnet[V](val future: Future[V])

object ValueMagnet {
  import scala.language.implicitConversions
  implicit def fromAny[V](block: V): ValueMagnet[V] = fromFuture(Future.successful(block))
  implicit def fromFuture[V](future: Future[V]): ValueMagnet[V] = new ValueMagnet(future)
}
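Because the stored values are Futures, the in-flight computation itself is inserted via computeIfAbsent, so concurrent callers share one Future per key, and a failed Future removes itself so a later call can retry. A usage sketch, assuming the class (private in the original file) is made visible to the caller:

import java.util.concurrent.ConcurrentHashMap

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

object CacheDemo extends App {
  // Assumes ConcurrentMapBackedCache is visible here (it is `private` in the original file).
  val cache = new ConcurrentMapBackedCache[String](new ConcurrentHashMap[Any, Future[String]]())

  val first = cache("user:42", () => Future { "loaded" })
  val second = cache("user:42", () => Future { "never runs" })

  // computeIfAbsent stores exactly one Future per key, so both callers share it.
  assert(first eq second)
}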
Example 18
Source File: OuterScopes.scala From XSQL with Apache License 2.0
package org.apache.spark.sql.catalyst.encoders

import java.util.concurrent.ConcurrentMap

import com.google.common.collect.MapMaker

import org.apache.spark.util.Utils

object OuterScopes {
  @transient
  lazy val outerScopes: ConcurrentMap[String, AnyRef] =
    new MapMaker().weakValues().makeMap()

  def getOuterScope(innerCls: Class[_]): () => AnyRef = {
    assert(innerCls.isMemberClass)
    val outerClassName = innerCls.getDeclaringClass.getName
    val outer = outerScopes.get(outerClassName)
    if (outer == null) {
      outerClassName match {
        // If the outer class is generated by the REPL, users don't need to register it, as it
        // has only one instance and there is a way to retrieve it: get the `$read` object, call
        // its `INSTANCE()` method to get the single instance of class `$read`, then call the
        // `$iw()` method multiple times to reach the single instance of the innermost `$iw`
        // class.
        case REPLClass(baseClassName) =>
          () => {
            val objClass = Utils.classForName(baseClassName + "$")
            val objInstance = objClass.getField("MODULE$").get(null)
            val baseInstance = objClass.getMethod("INSTANCE").invoke(objInstance)
            val baseClass = Utils.classForName(baseClassName)

            var getter = iwGetter(baseClass)
            var obj = baseInstance
            while (getter != null) {
              obj = getter.invoke(obj)
              getter = iwGetter(getter.getReturnType)
            }

            if (obj == null) {
              throw new RuntimeException(s"Failed to get outer pointer for ${innerCls.getName}")
            }

            outerScopes.putIfAbsent(outerClassName, obj)
            obj
          }
        case _ => null
      }
    } else {
      () => outer
    }
  }

  private def iwGetter(cls: Class[_]) = {
    try {
      cls.getMethod("$iw")
    } catch {
      case _: NoSuchMethodException => null
    }
  }

  // The format of REPL-generated wrapper classes' names, e.g. `$line12.$read$$iw$$iw`
  private[this] val REPLClass = """^(\$line(?:\d+)\.\$read)(?:\$\$iw)+$""".r
}
Example 19
Source File: EnsuringMap.scala From mango with Apache License 2.0
package com.kakao.mango.concurrent

import java.util.concurrent.ConcurrentMap

import scala.language.implicitConversions

// Note: the class declaration was truncated in the original listing; it is
// reconstructed here from its usage in ConcurrentConverters (next example).
class EnsuringMap[K, V](map: ConcurrentMap[K, V]) {

  // Return the value for `key`, atomically inserting `default` if the key is absent.
  def ensureEntry(key: K, default: => V): V = {
    var result = map.get(key)
    if (result == null) {
      val value = default
      result = map.putIfAbsent(key, value)
      if (result == null) {
        result = value
      }
    }
    result
  }
}
Example 20
Source File: ConcurrentConverters.scala From mango with Apache License 2.0
package com.kakao.mango.concurrent

import java.util.concurrent.{ConcurrentMap, TimeUnit, TimeoutException}

import com.kakao.shaded.netty.util.{HashedWheelTimer, Timeout, TimerTask}

import scala.collection.JavaConversions._
import scala.concurrent.duration._
import scala.concurrent.{Future, Promise}
import scala.language.implicitConversions

// Excerpt: the enclosing object declaration and the `timer` field (a HashedWheelTimer)
// were truncated in the original listing; `RichFuture` is defined elsewhere in the package.

def timeout(duration: Duration): Future[Nothing] = {
  val promise = Promise[Nothing]()
  timer.newTimeout(new TimerTask {
    override def run(timeout: Timeout): Unit = {
      promise.failure(new TimeoutException(s"Operation was timed out after $duration"))
    }
  }, duration.toMillis, TimeUnit.MILLISECONDS)
  promise.future
}

implicit def toRichFuture[T](future: Future[T])(implicit timeout: Duration = 5.seconds): RichFuture[T] =
  new RichFuture[T](future, timeout)

implicit def toEnsuring[K, V](map: ConcurrentMap[K, V]): EnsuringMap[K, V] =
  new EnsuringMap(map)

implicit def toEnsuring[K, V](map: scala.collection.concurrent.Map[K, V]): EnsuringMap[K, V] =
  new EnsuringMap(map)
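With the toEnsuring conversions above in scope, any Java ConcurrentMap (or Scala concurrent Map) picks up ensureEntry as a one-call get-or-create. A small usage sketch, assuming the conversions are importable along the lines of import com.kakao.mango.concurrent._:

import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.AtomicLong

object EnsureEntryDemo extends App {
  // Assumes the implicit toEnsuring conversion above is in scope.
  val counters = new ConcurrentHashMap[String, AtomicLong]()

  // Get-or-create in one call; putIfAbsent keeps it race-safe, although the
  // by-name default may be evaluated more than once under contention.
  counters.ensureEntry("requests", new AtomicLong()).incrementAndGet()
  assert(counters.get("requests").get() == 1L)
}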
Example 21
Source File: ShuffleTestAccessor.scala From drizzle-spark with Apache License 2.0
package org.apache.spark.network.shuffle

import java.io.File
import java.util.concurrent.ConcurrentMap

import org.apache.hadoop.yarn.api.records.ApplicationId
import org.fusesource.leveldbjni.JniDBFactory
import org.iq80.leveldb.{DB, Options}

import org.apache.spark.network.shuffle.ExternalShuffleBlockResolver.AppExecId
import org.apache.spark.network.shuffle.protocol.ExecutorShuffleInfo

object ShuffleTestAccessor {

  def getBlockResolver(handler: ExternalShuffleBlockHandler): ExternalShuffleBlockResolver = {
    handler.blockManager
  }

  def getExecutorInfo(
      appId: ApplicationId,
      execId: String,
      resolver: ExternalShuffleBlockResolver): Option[ExecutorShuffleInfo] = {
    val id = new AppExecId(appId.toString, execId)
    Option(resolver.executors.get(id))
  }

  def registeredExecutorFile(resolver: ExternalShuffleBlockResolver): File = {
    resolver.registeredExecutorFile
  }

  def shuffleServiceLevelDB(resolver: ExternalShuffleBlockResolver): DB = {
    resolver.db
  }

  def reloadRegisteredExecutors(
      file: File): ConcurrentMap[ExternalShuffleBlockResolver.AppExecId, ExecutorShuffleInfo] = {
    val options: Options = new Options
    options.createIfMissing(true)
    val factory = new JniDBFactory
    val db = factory.open(file, options)
    val result = ExternalShuffleBlockResolver.reloadRegisteredExecutors(db)
    db.close()
    result
  }

  def reloadRegisteredExecutors(
      db: DB): ConcurrentMap[ExternalShuffleBlockResolver.AppExecId, ExecutorShuffleInfo] = {
    ExternalShuffleBlockResolver.reloadRegisteredExecutors(db)
  }
}