java.nio.charset.Charset Scala Examples
The following examples show how to use java.nio.charset.Charset.
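Before the project examples, here is a minimal, self-contained sketch of the core Charset operations they all build on: looking a charset up by name, encoding a String to bytes, and decoding the bytes back. The object and value names below are illustrative only, not taken from any of the projects that follow.

import java.nio.charset.{Charset, StandardCharsets}

object CharsetBasics {
  def main(args: Array[String]): Unit = {
    // Look up a charset by canonical name or alias; throws
    // UnsupportedCharsetException if the name is unknown.
    val utf8: Charset = Charset.forName("UTF-8")

    // Encode a String to bytes and decode the bytes back,
    // naming the charset explicitly at both ends.
    val bytes: Array[Byte] = "héllo".getBytes(utf8)
    val decoded: String = new String(bytes, utf8)

    // StandardCharsets provides constants for the guaranteed charsets,
    // avoiding the string lookup entirely.
    assert(decoded == new String(bytes, StandardCharsets.UTF_8))

    // The platform default varies between JVMs, which is why most of
    // the examples below pin an explicit charset instead.
    println(s"default charset: ${Charset.defaultCharset()}")
  }
}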
Example 1
Source File: WarcHeaders.scala, from ArchiveSpark (MIT License)
package org.archive.archivespark.sparkling.warc

import java.nio.charset.Charset
import java.util.UUID

import org.archive.archivespark.sparkling.Sparkling
import org.archive.archivespark.sparkling.util.DigestUtil
import org.joda.time.Instant
import org.joda.time.format.{DateTimeFormat, DateTimeFormatter, ISODateTimeFormat}

object WarcHeaders {
  val UTF8: Charset = Charset.forName(Sparkling.DefaultCharset)
  val ArcDateTimeFormat: DateTimeFormatter = DateTimeFormat.forPattern("yyyyMMddHHmmss").withZoneUTC
  val WarcDateTimeFormat: DateTimeFormatter = ISODateTimeFormat.dateTimeNoMillis
  val Br = "\r\n"

  def arcFile(info: WarcFileMeta, filename: String): Array[Byte] = {
    val header = StringBuilder.newBuilder
    header.append("filedesc://")
    header.append(filename)
    header.append(" 0.0.0.0 ")
    header.append(ArcDateTimeFormat.print(info.created))
    header.append(" text/plain ")

    val headerBody = StringBuilder.newBuilder
    // Internet Archive: Name of gathering organization with no white space
    // (http://archive.org/web/researcher/ArcFileFormat.php)
    headerBody.append("1 0 " + info.publisher.replace(" ", "")).append(Br)
    headerBody.append("URL IP-address Archive-date Content-type Archive-length").append(Br)

    val headerBodyStr: String = headerBody.toString
    val headerBodyBlob: Array[Byte] = headerBodyStr.getBytes(UTF8)

    header.append(headerBodyBlob.length).append(Br)
    header.append(headerBodyStr).append(Br)

    header.toString().getBytes(UTF8)
  }

  def warcFile(meta: WarcFileMeta, filename: String): Array[Byte] = {
    val header = StringBuilder.newBuilder
    header.append("WARC/1.0").append(Br)
    header.append("WARC-Type: warcinfo").append(Br)
    header.append("WARC-Date: " + WarcDateTimeFormat.print(Instant.now)).append(Br)
    header.append("WARC-Filename: " + filename).append(Br)
    header.append("WARC-Record-ID: " + newRecordID()).append(Br)
    header.append("Content-Type: application/warc-fields").append(Br)

    val headerBody = StringBuilder.newBuilder
    headerBody.append("software: " + meta.software).append(Br)
    headerBody.append("format: WARC File Format 1.0").append(Br)
    headerBody.append("conformsTo: http://bibnum.bnf.fr/WARC/WARC_ISO_28500_version1_latestdraft.pdf").append(Br)
    headerBody.append("publisher: " + meta.publisher).append(Br)
    headerBody.append("created: " + WarcDateTimeFormat.print(meta.created)).append(Br)
    headerBody.append(Br * 3)

    val headerBodyStr = headerBody.toString()
    val headerBodyBlob = headerBodyStr.getBytes(UTF8)

    header.append("Content-Length: " + headerBodyBlob.length).append(Br)
    header.append(Br)
    header.append(headerBodyStr)

    header.toString().getBytes(UTF8)
  }

  def warcResponseRecord(meta: WarcRecordMeta, content: Array[Byte], payload: Array[Byte]): Array[Byte] = {
    val header = StringBuilder.newBuilder
    header.append("WARC/1.0").append(Br)
    header.append("WARC-Type: response").append(Br)
    header.append("WARC-Target-URI: " + meta.url).append(Br)
    header.append("WARC-Date: " + WarcDateTimeFormat.print(meta.timestamp)).append(Br)
    header.append("WARC-Payload-Digest: sha1:" + DigestUtil.sha1Base32(payload)).append(Br)
    if (meta.ip.isDefined) header.append("WARC-IP-Address: " + meta.ip.get).append(Br)
    header.append("WARC-Record-ID: " + meta.recordId.getOrElse(newRecordID())).append(Br)
    header.append("Content-Type: application/http; msgtype=response").append(Br)
    header.append("Content-Length: " + content.length).append(Br)
    header.append(Br)

    header.toString().getBytes(UTF8)
  }

  def http(statusLine: String, headers: Map[String, String]): Array[Byte] = {
    val header = StringBuilder.newBuilder
    header.append(statusLine).append(Br)
    for ((key, value) <- headers) {
      header.append(s"$key: $value").append(Br)
    }
    header.append(Br)
    header.toString().getBytes(UTF8)
  }

  private def newRecordID(): String = "<urn:uuid:" + UUID.randomUUID() + ">"
}
Example 2
Source File: ImmutableMigrationsSpec.scala, from daml (Apache License 2.0)
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.on.sql

import java.io.{BufferedReader, FileNotFoundException}
import java.math.BigInteger
import java.nio.charset.Charset
import java.security.MessageDigest
import java.util

import com.daml.ledger.on.sql.ImmutableMigrationsSpec._
import org.flywaydb.core.Flyway
import org.flywaydb.core.api.configuration.FluentConfiguration
import org.flywaydb.core.internal.resource.LoadableResource
import org.flywaydb.core.internal.scanner.{LocationScannerCache, ResourceNameCache, Scanner}
import org.scalatest.Matchers._
import org.scalatest.WordSpec

import scala.collection.JavaConverters._

class ImmutableMigrationsSpec extends WordSpec {
  "migration files" should {
    "never change, according to their accompanying digest file" in {
      val configuration = Flyway
        .configure()
        .locations(s"classpath:/$migrationsResourcePath")
      val resourceScanner = flywayScanner(configuration)
      val resources = resourceScanner.getResources("", ".sql").asScala.toSeq
      resources.size should be >= 3

      resources.foreach { resource =>
        val migrationFile = resource.getRelativePath
        val digestFile = migrationFile + ".sha256"
        val expectedDigest = readExpectedDigest(migrationFile, digestFile, resourceScanner)
        val currentDigest = computeCurrentDigest(resource, configuration.getEncoding)
        assert(
          currentDigest == expectedDigest,
          s"""The contents of the migration file "$migrationFile" have changed! Migrations are immutable; you must not change their contents or their digest.""",
        )
      }
    }
  }
}

object ImmutableMigrationsSpec {
  private val migrationsResourcePath = "com/daml/ledger/on/sql/migrations"
  private val hashMigrationsScriptPath = "ledger/ledger-on-sql/hash-migrations.sh"

  private def flywayScanner(configuration: FluentConfiguration) =
    new Scanner(
      classOf[Object],
      util.Arrays.asList(configuration.getLocations: _*),
      getClass.getClassLoader,
      configuration.getEncoding,
      new ResourceNameCache,
      new LocationScannerCache,
    )

  private def readExpectedDigest(
      sourceFile: String,
      digestFile: String,
      resourceScanner: Scanner[_],
  ): String = {
    val resource = Option(resourceScanner.getResource(digestFile))
      .getOrElse(throw new FileNotFoundException(
        s"""\"$digestFile\" is missing. If you are introducing a new Flyway migration step, you need to create an SHA-256 digest file by running $hashMigrationsScriptPath."""))
    new BufferedReader(resource.read()).readLine()
  }

  private def computeCurrentDigest(resource: LoadableResource, encoding: Charset): String = {
    val sha256 = MessageDigest.getInstance("SHA-256")
    new BufferedReader(resource.read())
      .lines()
      .forEach(line => sha256.update((line + "\n").getBytes(encoding)))
    val digest = sha256.digest()
    String.format(s"%0${digest.length * 2}x", new BigInteger(1, digest))
  }
}
Example 3
Source File: FlywayMigrationsSpec.scala, from daml (Apache License 2.0)
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.store

import java.math.BigInteger
import java.nio.charset.Charset
import java.security.MessageDigest

import com.daml.platform.store.FlywayMigrationsSpec._
import org.apache.commons.io.IOUtils
import org.flywaydb.core.api.configuration.FluentConfiguration
import org.flywaydb.core.api.migration.JavaMigration
import org.flywaydb.core.internal.resource.LoadableResource
import org.flywaydb.core.internal.scanner.{LocationScannerCache, ResourceNameCache, Scanner}
import org.scalatest.Matchers._
import org.scalatest.WordSpec

import scala.collection.JavaConverters._

// SQL MIGRATION AND THEIR DIGEST FILES SHOULD BE CREATED ONLY ONCE AND NEVER CHANGED AGAIN,
// OTHERWISE MIGRATIONS BREAK ON EXISTING DEPLOYMENTS!
class FlywayMigrationsSpec extends WordSpec {

  "Postgres flyway migration files" should {
    "always have a valid SHA-256 digest file accompanied" in {
      assertFlywayMigrationFileHashes(DbType.Postgres)
    }
  }

  "H2 database flyway migration files" should {
    "always have a valid SHA-256 digest file accompanied" in {
      assertFlywayMigrationFileHashes(DbType.H2Database)
    }
  }
}

object FlywayMigrationsSpec {
  private val digester = MessageDigest.getInstance("SHA-256")

  private def assertFlywayMigrationFileHashes(dbType: DbType): Unit = {
    val config = FlywayMigrations.configurationBase(dbType)
    val resourceScanner = scanner(config)
    val resources = resourceScanner.getResources("", ".sql").asScala.toSeq
    resources.size should be > 10

    resources.foreach { res =>
      val fileName = res.getFilename
      val expectedDigest =
        getExpectedDigest(fileName, fileName.dropRight(4) + ".sha256", resourceScanner)
      val currentDigest = getCurrentDigest(res, config.getEncoding)
      assert(
        currentDigest == expectedDigest,
        s"Digest of migration file $fileName has changed! It is NOT allowed to change neither existing sql migrations files nor their digests!"
      )
    }
  }

  private def scanner(config: FluentConfiguration) =
    new Scanner(
      classOf[JavaMigration],
      config.getLocations.toList.asJava,
      getClass.getClassLoader,
      config.getEncoding,
      new ResourceNameCache,
      new LocationScannerCache,
    )

  private def getExpectedDigest(
      sourceFile: String,
      digestFile: String,
      resourceScanner: Scanner[_],
  ) =
    IOUtils.toString(
      Option(resourceScanner.getResource(digestFile))
        .getOrElse(sys.error(s"""Missing sha-256 file $digestFile!
                                |Are you introducing a new Flyway migration step?
                                |You need to create a sha-256 digest file by either running:
                                | - shasum -a 256 $sourceFile | awk '{print $$1}' > $digestFile (under the db/migration folder)
                                | - or ledger/sandbox/src/main/resources/db/migration/recompute-sha256sums.sh
                                |""".stripMargin))
        .read())

  private def getCurrentDigest(res: LoadableResource, encoding: Charset) = {
    val digest = digester.digest(IOUtils.toByteArray(res.read(), encoding))
    String.format(s"%0${digest.length * 2}x\n", new BigInteger(1, digest))
  }
}
Example 4
Source File: JwtSigner.scala, from daml (Apache License 2.0)
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.jwt

import java.nio.charset.Charset
import java.security.interfaces.{ECPrivateKey, RSAPrivateKey}

import com.auth0.jwt.algorithms.Algorithm
import scalaz.syntax.traverse._
import scalaz.{Show, \/}
import scalaz.syntax.show._

object JwtSigner {

  private val charset = Charset.forName("ASCII")

  object HMAC256 {
    def sign(jwt: domain.DecodedJwt[String], secret: String): Error \/ domain.Jwt =
      for {
        base64Jwt <- base64Encode(jwt)
        algorithm <- \/.fromTryCatchNonFatal(Algorithm.HMAC256(secret))
          .leftMap(e => Error(Symbol("HMAC256.sign"), e.getMessage))
        signature <- \/.fromTryCatchNonFatal(algorithm.sign(base64Jwt.header, base64Jwt.payload))
          .leftMap(e => Error(Symbol("HMAC256.sign"), e.getMessage))
        base64Signature <- base64Encode(signature)
      } yield
        domain.Jwt(
          s"${str(base64Jwt.header): String}.${str(base64Jwt.payload)}.${str(base64Signature): String}")
  }

  object RSA256 {
    def sign(jwt: domain.DecodedJwt[String], privateKey: RSAPrivateKey): Error \/ domain.Jwt =
      for {
        base64Jwt <- base64Encode(jwt)
        algorithm <- \/.fromTryCatchNonFatal(Algorithm.RSA256(null, privateKey))
          .leftMap(e => Error(Symbol("RSA256.sign"), e.getMessage))
        signature <- \/.fromTryCatchNonFatal(algorithm.sign(base64Jwt.header, base64Jwt.payload))
          .leftMap(e => Error(Symbol("RSA256.sign"), e.getMessage))
        base64Signature <- base64Encode(signature)
      } yield
        domain.Jwt(
          s"${str(base64Jwt.header): String}.${str(base64Jwt.payload)}.${str(base64Signature): String}")
  }

  object ECDSA {
    def sign(
        jwt: domain.DecodedJwt[String],
        privateKey: ECPrivateKey,
        algorithm: ECPrivateKey => Algorithm): Error \/ domain.Jwt =
      for {
        base64Jwt <- base64Encode(jwt)
        algorithm <- \/.fromTryCatchNonFatal(algorithm(privateKey))
          .leftMap(e => Error(Symbol(algorithm.getClass.getTypeName), e.getMessage))
        signature <- \/.fromTryCatchNonFatal(algorithm.sign(base64Jwt.header, base64Jwt.payload))
          .leftMap(e => Error(Symbol(algorithm.getClass.getTypeName), e.getMessage))
        base64Signature <- base64Encode(signature)
      } yield
        domain.Jwt(
          s"${str(base64Jwt.header): String}.${str(base64Jwt.payload)}.${str(base64Signature): String}")
  }

  private def str(bs: Array[Byte]) = new String(bs, charset)

  private def base64Encode(a: domain.DecodedJwt[String]): Error \/ domain.DecodedJwt[Array[Byte]] =
    a.traverse(base64Encode)

  private def base64Encode(str: String): Error \/ Array[Byte] =
    base64Encode(str.getBytes)

  private def base64Encode(bs: Array[Byte]): Error \/ Array[Byte] =
    Base64
      .encodeWithoutPadding(bs)
      .leftMap(e => Error('base64Encode, e.shows))

  final case class Error(what: Symbol, message: String)

  object Error {
    implicit val showInstance: Show[Error] =
      Show.shows(e => s"JwtSigner.Error: ${e.what}, ${e.message}")
  }
}
Example 5
Source File: DarManifestReaderTest.scala, from daml (Apache License 2.0)
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf.archive

import java.io.{ByteArrayInputStream, InputStream}
import java.nio.charset.Charset

import com.daml.lf.archive.DarManifestReader.DarManifestReaderException
import org.scalatest.{Inside, Matchers, WordSpec}

import scala.util.{Failure, Success}

class DarManifestReaderTest extends WordSpec with Matchers with Inside {

  private val unicode = Charset.forName("UTF-8")

  "should read dalf names from manifest, real scenario with Dalfs line split" in {
    val manifest = """Manifest-Version: 1.0
      |Created-By: Digital Asset packager (DAML-GHC)
      |Main-Dalf: com.daml.lf.archive:DarReaderTest:0.1.dalf
      |Dalfs: com.daml.lf.archive:DarReaderTest:0.1.dalf, daml-pri
      | m.dalf
      |Format: daml-lf
      |Encryption: non-encrypted""".stripMargin

    val inputStream: InputStream = new ByteArrayInputStream(manifest.getBytes(unicode))
    val actual = DarManifestReader.dalfNames(inputStream)

    actual shouldBe Success(
      Dar("com.daml.lf.archive:DarReaderTest:0.1.dalf", List("daml-prim.dalf")))

    inputStream.close()
  }

  "should read dalf names from manifest, Main-Dalf returned in the head" in {
    val manifest = """Main-Dalf: A.dalf
      |Dalfs: B.dalf, C.dalf, A.dalf, E.dalf
      |Format: daml-lf
      |Encryption: non-encrypted""".stripMargin

    val inputStream: InputStream = new ByteArrayInputStream(manifest.getBytes(unicode))
    val actual = DarManifestReader.dalfNames(inputStream)

    actual shouldBe Success(Dar("A.dalf", List("B.dalf", "C.dalf", "E.dalf")))

    inputStream.close()
  }

  "should read dalf names from manifest, can handle one Dalf per manifest" in {
    val manifest = """Main-Dalf: A.dalf
      |Dalfs: A.dalf
      |Format: daml-lf
      |Encryption: non-encrypted""".stripMargin

    val inputStream: InputStream = new ByteArrayInputStream(manifest.getBytes(unicode))
    val actual = DarManifestReader.dalfNames(inputStream)

    actual shouldBe Success(Dar("A.dalf", List.empty))

    inputStream.close()
  }

  "should return failure if Format is not daml-lf" in {
    val manifest = """Main-Dalf: A.dalf
      |Dalfs: B.dalf, C.dalf, A.dalf, E.dalf
      |Format: anything-different-from-daml-lf
      |Encryption: non-encrypted""".stripMargin

    val inputStream: InputStream = new ByteArrayInputStream(manifest.getBytes(unicode))
    val actual = DarManifestReader.dalfNames(inputStream)

    inside(actual) {
      case Failure(DarManifestReaderException(msg)) =>
        msg shouldBe "Unsupported format: anything-different-from-daml-lf"
    }

    inputStream.close()
  }
}
Example 6
Source File: Utils.scala, from BigDL (Apache License 2.0)
package com.intel.analytics.bigdl.utils.tf.loaders

import java.nio.ByteOrder
import java.nio.charset.Charset
import java.util

import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.tf.Context
import com.intel.analytics.bigdl.utils.tf.TensorflowToBigDL.toTensor
import org.tensorflow.framework.{AttrValue, DataType, NodeDef}

import scala.reflect.ClassTag
import collection.JavaConverters._

object Utils {
  private[loaders] def getOrSetTensor[T: ClassTag](
    node: NodeDef,
    context: Context[T],
    byteOrder: ByteOrder,
    trans: Option[Seq[(Int, Int)]] = None)(
    implicit ev: TensorNumeric[T]): (Tensor[T], Tensor[T]) = {

    if (context.containsTensor(node.getName)) {
      val result = context(node.getName)
      (result._1, result._2)
    } else {
      var weight = toTensor(node.getAttrMap.get("value").getTensor, byteOrder)
        .asInstanceOf[Tensor[T]]
      trans match {
        case Some(transposes) =>
          for ((first, second) <- transposes) {
            weight = weight.transpose(first, second)
          }
          weight = weight.contiguous()
        case _ =>
      }
      val gradient = Tensor[T](weight.size())
      context.putTensor(node.getName, (weight, gradient, trans))
      (weight, gradient)
    }
  }

  private[loaders] def getString(attrMap: util.Map[String, AttrValue], key: String): String = {
    require(attrMap.containsKey(key), s"Operation doesn't contain attributed $key")
    attrMap.get(key).getS.toString(Charset.defaultCharset())
  }

  private[loaders] def getString(nodeDef: NodeDef, key: String): String = {
    getString(nodeDef.getAttrMap, key)
  }

  private[loaders] def getInt(attrMap: util.Map[String, AttrValue], key: String): Int = {
    require(attrMap.containsKey(key), s"Operation doesn't contain attributed $key")
    attrMap.get(key).getI.toInt
  }

  private[loaders] def getFloat(attrMap: util.Map[String, AttrValue], key: String): Float = {
    require(attrMap.containsKey(key), s"Operation doesn't contain attributed $key")
    attrMap.get(key).getF
  }

  private[loaders] def getBoolean(attrMap: util.Map[String, AttrValue], key: String): Boolean = {
    require(attrMap.containsKey(key), s"Operation doesn't contain attributed $key")
    attrMap.get(key).getB
  }

  private[loaders] def getBoolean(nodeDef: NodeDef, key: String): Boolean = {
    getBoolean(nodeDef.getAttrMap, key)
  }

  private[loaders] def getIntList(attrMap: util.Map[String, AttrValue], key: String): Seq[Int] = {
    require(attrMap.containsKey(key), s"Operation doesn't contain attributed $key")
    attrMap.get(key).getList.getIList.asScala.map(_.toInt)
  }

  private[loaders] def getType(attrMap: util.Map[String, AttrValue], key: String): DataType = {
    require(attrMap.containsKey(key), s"Operation doesn't contain attributed $key")
    attrMap.get(key).getType
  }

  private[loaders] def getType(nodeDef: NodeDef, key: String): DataType = {
    getType(nodeDef.getAttrMap, key)
  }

  private[loaders] def toArray[T: ClassTag](tensor: Tensor[T]): Array[T] = {
    require(tensor.nDimension() == 1, "require 1D tensor")
    val array = new Array[T](tensor.nElement())
    var i = 0
    while (i < array.length) {
      array(i) = tensor.valueAt(i + 1)
      i += 1
    }
    array
  }
}
Example 7
Source File: Conv3DBackpropInputV2Spec.scala, from BigDL (Apache License 2.0)
package com.intel.analytics.bigdl.utils.tf.loaders

import java.nio.charset.Charset

import com.google.protobuf.ByteString
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.tf.{PaddingType, TensorflowSpecHelper}
import org.tensorflow.framework.{AttrValue, DataType, NodeDef}
import com.intel.analytics.bigdl.utils.tf.Tensorflow._

class Conv3DBackpropInputV2Spec extends TensorflowSpecHelper {

  "Conv3DBackpropInputV2 forward with VALID padding" should "be correct" in {
    val dataFormat = AttrValue.newBuilder().setS(ByteString
      .copyFrom("NDHWC", Charset.defaultCharset())).build()

    val builder = NodeDef.newBuilder()
      .setName(s"Conv3DBackpropInputV2Test")
      .setOp("Conv3DBackpropInputV2")
      .putAttr("T", typeAttr(DataType.DT_FLOAT))
      .putAttr("strides", listIntAttr(Seq(1, 1, 2, 3, 1)))
      .putAttr("padding", PaddingType.PADDING_VALID.value)
      .putAttr("data_format", dataFormat)

    val inputSize = Tensor[Int](Array(4, 20, 30, 40, 3), Array(5))
    val filter = Tensor[Float](2, 3, 4, 3, 4).rand()
    val outputBackprop = Tensor[Float](4, 19, 14, 13, 4).rand()

    compare[Float](
      builder,
      Seq(inputSize, filter, outputBackprop),
      0,
      1e-4
    )
  }

  "Conv3DBackpropInputV2 forward with SAME padding" should "be correct" in {
    val dataFormat = AttrValue.newBuilder().setS(ByteString
      .copyFrom("NDHWC", Charset.defaultCharset())).build()

    val builder = NodeDef.newBuilder()
      .setName(s"Conv3DBackpropInputV2Test")
      .setOp("Conv3DBackpropInputV2")
      .putAttr("T", typeAttr(DataType.DT_FLOAT))
      .putAttr("strides", listIntAttr(Seq(1, 1, 2, 3, 1)))
      .putAttr("padding", PaddingType.PADDING_SAME.value)
      .putAttr("data_format", dataFormat)

    val inputSize = Tensor[Int](Array(4, 20, 30, 40, 3), Array(5))
    val filter = Tensor[Float](2, 3, 4, 3, 4).rand()
    val outputBackprop = Tensor[Float](4, 20, 15, 14, 4).rand()

    compare[Float](
      builder,
      Seq(inputSize, filter, outputBackprop),
      0,
      1e-4
    )
  }
}
Example 8
Source File: Conv3DSpec.scala, from BigDL (Apache License 2.0)
package com.intel.analytics.bigdl.utils.tf.loaders

import java.nio.charset.Charset

import com.google.protobuf.ByteString
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.tf.{PaddingType, TensorflowSpecHelper}
import org.tensorflow.framework.{AttrValue, DataType, NodeDef}
import com.intel.analytics.bigdl.utils.tf.Tensorflow._

class Conv3DSpec extends TensorflowSpecHelper {

  "Conv3D forward with VALID padding" should "be correct" in {
    val dataFormat = AttrValue.newBuilder().setS(ByteString
      .copyFrom("NDHWC", Charset.defaultCharset())).build()

    val builder = NodeDef.newBuilder()
      .setName(s"Conv3DTest")
      .setOp("Conv3D")
      .putAttr("T", typeAttr(DataType.DT_FLOAT))
      .putAttr("strides", listIntAttr(Seq(1, 1, 2, 3, 1)))
      .putAttr("padding", PaddingType.PADDING_VALID.value)
      .putAttr("data_format", dataFormat)

    val input = Tensor[Float](4, 20, 30, 40, 3).rand()
    val filter = Tensor[Float](2, 3, 4, 3, 4).rand()

    compare[Float](
      builder,
      Seq(input, filter),
      0,
      1e-4
    )
  }

  "Conv3D forward with SAME padding" should "be correct" in {
    val dataFormat = AttrValue.newBuilder().setS(ByteString
      .copyFrom("NDHWC", Charset.defaultCharset())).build()

    val builder = NodeDef.newBuilder()
      .setName(s"Conv3DTest")
      .setOp("Conv3D")
      .putAttr("T", typeAttr(DataType.DT_FLOAT))
      .putAttr("strides", listIntAttr(Seq(1, 1, 2, 3, 1)))
      .putAttr("padding", PaddingType.PADDING_SAME.value)
      .putAttr("data_format", dataFormat)

    val input = Tensor[Float](4, 20, 30, 40, 3).rand()
    val filter = Tensor[Float](2, 3, 4, 3, 4).rand()

    compare[Float](
      builder,
      Seq(input, filter),
      0,
      1e-4
    )
  }
}
Example 9
Source File: Conv3DBackpropFilterV2Spec.scala, from BigDL (Apache License 2.0)
package com.intel.analytics.bigdl.utils.tf.loaders

import java.nio.charset.Charset

import com.google.protobuf.ByteString
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.tf.{PaddingType, TensorflowSpecHelper}
import org.tensorflow.framework.{AttrValue, DataType, NodeDef}
import com.intel.analytics.bigdl.utils.tf.Tensorflow._

class Conv3DBackpropFilterV2Spec extends TensorflowSpecHelper {

  "Conv3DBackpropFilter forward with VALID padding" should "be correct" in {
    val dataFormat = AttrValue.newBuilder().setS(ByteString
      .copyFrom("NDHWC", Charset.defaultCharset())).build()

    val builder = NodeDef.newBuilder()
      .setName(s"Conv3DBackpropFilterV2Test")
      .setOp("Conv3DBackpropFilterV2")
      .putAttr("T", typeAttr(DataType.DT_FLOAT))
      .putAttr("strides", listIntAttr(Seq(1, 1, 2, 3, 1)))
      .putAttr("padding", PaddingType.PADDING_VALID.value)
      .putAttr("data_format", dataFormat)

    val input = Tensor[Float](4, 20, 30, 40, 3).rand()
    val filter = Tensor[Int](Array(2, 3, 4, 3, 4), Array(5))
    val outputBackprop = Tensor[Float](4, 19, 14, 13, 4).rand()

    // the output in this case is typically on the scale of thousands,
    // so it is ok to have 1e-2 absolute error tolerance
    compare[Float](
      builder,
      Seq(input, filter, outputBackprop),
      0,
      1e-2
    )
  }

  "Conv3DBackpropFilter forward with SAME padding" should "be correct" in {
    val dataFormat = AttrValue.newBuilder().setS(ByteString
      .copyFrom("NDHWC", Charset.defaultCharset())).build()

    val builder = NodeDef.newBuilder()
      .setName(s"Conv3DBackpropFilterV2Test")
      .setOp("Conv3DBackpropFilterV2")
      .putAttr("T", typeAttr(DataType.DT_FLOAT))
      .putAttr("strides", listIntAttr(Seq(1, 1, 2, 3, 1)))
      .putAttr("padding", PaddingType.PADDING_SAME.value)
      .putAttr("data_format", dataFormat)

    val input = Tensor[Float](4, 20, 30, 40, 3).rand()
    val filter = Tensor[Int](Array(2, 3, 4, 3, 4), Array(5))
    val outputBackprop = Tensor[Float](4, 20, 15, 14, 4).rand()

    // the output in this case is typically on the scale of thousands,
    // so it is ok to have 1e-2 absolute error tolerance
    compare[Float](
      builder,
      Seq(input, filter, outputBackprop),
      0,
      1e-2
    )
  }
}
Example 10
Source File: ByteArrayCompanion.scala, from jvm-toxcore-api (GNU General Public License v3.0)
package im.tox.core.typesafe

import java.nio.charset.Charset

import im.tox.core.Functional._
import im.tox.core.error.CoreError

abstract class ByteArrayCompanion[T <: AnyVal, S <: Security](
  toValue: T => Array[Byte]
) extends WrappedValueCompanion[Array[Byte], T, S](toValue) {

  private val UTF_8 = Charset.forName("UTF-8")

  final override def equals(a: T, b: T): Boolean = {
    toValue(a) sameElements toValue(b)
  }

  final def fromString(value: String): CoreError \/ T = {
    fromValue(value.getBytes(UTF_8))
  }

  final def fromHexString(string: String): CoreError \/ T = {
    for {
      bytes <- parseHexEncodedString {
        // Class name is SomeClass$ because this is the companion object, but T
        // is SomeClass, so we drop the $.
        val className = getClass.getSimpleName.dropRight(1)
        if (string.startsWith(className)) {
          string.substring(className.length + 1, string.length - 1)
        } else {
          string
        }
      }
      self <- fromValue(bytes)
    } yield {
      self
    }
  }

  private def parseHexEncodedString(id: String): CoreError \/ Array[Byte] = {
    foldDisjunctionList((0 until id.length / 2).reverseMap { i =>
      for {
        hiNibble <- fromHexDigit(id, i * 2)
        loNibble <- fromHexDigit(id, i * 2 + 1)
      } yield {
        ((hiNibble << 4) + loNibble).toByte
      }
    }).map(_.toArray)
  }

  private def fromHexDigit(id: String, position: Int): CoreError \/ Byte = {
    val c = id.charAt(position)
    val digit: \/[CoreError, Int] =
      if (false) { \/-(0) }
      else if ('0' to '9' contains c) { \/-(c - '0') }
      else if ('A' to 'F' contains c) { \/-(c - 'A' + 10) }
      else if ('a' to 'f' contains c) { \/-(c - 'a' + 10) }
      else { -\/(CoreError.InvalidFormat(s"Non-hex digit character at position $position: $c")) }
    digit.map(_.toByte)
  }
}
Example 11
Source File: PlayJsonSupport.scala, from incubator-s2graph (Apache License 2.0)
package org.apache.s2graph.http

import java.nio.charset.Charset

import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller}
import akka.http.scaladsl.model._
import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, Unmarshaller}
import akka.util.ByteString
import play.api.libs.json._

trait PlayJsonSupport {

  private val mediaTypes: Seq[MediaType.WithFixedCharset] =
    Seq(MediaType.applicationWithFixedCharset("json", HttpCharsets.`UTF-8`, "js"))

  private val unmarshallerContentTypes: Seq[ContentTypeRange] =
    mediaTypes.map(ContentTypeRange.apply)

  implicit val playJsonMarshaller: ToEntityMarshaller[JsValue] = {
    Marshaller.oneOf(mediaTypes: _*) { mediaType =>
      Marshaller.withFixedContentType(ContentType(mediaType)) { json =>
        HttpEntity(mediaType, json.toString)
      }
    }
  }

  implicit val playJsonUnmarshaller: FromEntityUnmarshaller[JsValue] = {
    Unmarshaller.byteStringUnmarshaller
      .forContentTypes(unmarshallerContentTypes: _*)
      .map {
        case ByteString.empty => throw Unmarshaller.NoContentException
        case data => Json.parse(data.decodeString(Charset.forName("UTF-8")))
      }
  }

  trait ToPlayJson[T] {
    def toJson(msg: T): JsValue
  }

  import scala.language.reflectiveCalls

  object ToPlayJson {
    type ToPlayJsonReflective = {
      def toJson: JsValue
    }

    implicit def forToJson[A <: ToPlayJsonReflective] = new ToPlayJson[A] {
      def toJson(js: A) = js.toJson
    }

    implicit def forPlayJson[A <: JsValue] = new ToPlayJson[A] {
      def toJson(js: A) = js
    }
  }

  implicit object JsErrorJsonWriter extends Writes[JsError] {
    def writes(o: JsError): JsValue = Json.obj(
      "errors" -> JsArray(
        o.errors.map {
          case (path, validationErrors) =>
            Json.obj(
              "path" -> Json.toJson(path.toString()),
              "validationErrors" -> JsArray(validationErrors.map(validationError =>
                Json.obj(
                  "message" -> JsString(validationError.message),
                  "args" -> JsArray(validationError.args.map {
                    case x: Int => JsNumber(x)
                    case x => JsString(x.toString)
                  })
                )))
            )
        }
      )
    )
  }
}
Example 12
Source File: SangriaGraphQLSupport.scala, from incubator-s2graph (Apache License 2.0)
package org.apache.s2graph.http

import java.nio.charset.Charset

import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller}
import akka.http.scaladsl.model._
import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, Unmarshaller}
import akka.util.ByteString
import sangria.ast.Document
import sangria.parser.QueryParser
import sangria.renderer.{QueryRenderer, QueryRendererConfig}

trait SangriaGraphQLSupport {

  private val mediaTypes: Seq[MediaType.WithFixedCharset] =
    Seq(MediaType.applicationWithFixedCharset("graphql", HttpCharsets.`UTF-8`, "graphql"))

  private val unmarshallerContentTypes: Seq[ContentTypeRange] =
    mediaTypes.map(ContentTypeRange.apply)

  implicit def documentMarshaller(implicit config: QueryRendererConfig = QueryRenderer.Compact): ToEntityMarshaller[Document] = {
    Marshaller.oneOf(mediaTypes: _*) { mediaType ⇒
      Marshaller.withFixedContentType(ContentType(mediaType)) { json ⇒
        HttpEntity(mediaType, QueryRenderer.render(json, config))
      }
    }
  }

  implicit val documentUnmarshaller: FromEntityUnmarshaller[Document] = {
    Unmarshaller.byteStringUnmarshaller
      .forContentTypes(unmarshallerContentTypes: _*)
      .map {
        case ByteString.empty ⇒ throw Unmarshaller.NoContentException
        case data ⇒
          import sangria.parser.DeliveryScheme.Throw
          QueryParser.parse(data.decodeString(Charset.forName("UTF-8")))
      }
  }
}
Example 13
Source File: SimpleRecordConverter.scala, from spark-ml-serving (Apache License 2.0)
package io.hydrosphere.spark_ml_serving.common.reader

import java.nio.charset.{Charset, CharsetDecoder}

import parquet.io.api.{Binary, Converter, GroupConverter, PrimitiveConverter}
import parquet.schema.{GroupType, OriginalType, Type}

import scala.collection.JavaConversions._

class SimpleRecordConverter(schema: GroupType, name: String, parent: SimpleRecordConverter)
  extends GroupConverter {

  val UTF8: Charset = Charset.forName("UTF-8")
  val UTF8_DECODER: CharsetDecoder = UTF8.newDecoder()

  var converters: Array[Converter] = schema.getFields.map(createConverter).toArray[Converter]
  var record: SimpleRecord = _

  private def createConverter(field: Type): Converter = {
    if (field.isPrimitive) {
      val originalType = field.getOriginalType
      originalType match {
        case OriginalType.UTF8 => return new StringConverter(field.getName)
        case _ => Unit
      }
      return new SimplePrimitiveConverter(field.getName)
    }
    new SimpleRecordConverter(field.asGroupType(), field.getName, this)
  }

  override def getConverter(i: Int): Converter = {
    converters(i)
  }

  override def start(): Unit = {
    record = new SimpleRecord()
  }

  override def end(): Unit = {
    if (parent != null) {
      parent.record.add(name, record)
    }
  }

  private class StringConverter(name: String) extends SimplePrimitiveConverter(name) {
    override def addBinary(value: Binary): Unit = {
      record.add(name, value.toStringUsingUTF8)
    }
  }

  private class SimplePrimitiveConverter(name: String) extends PrimitiveConverter {
    override def addBinary(value: Binary): Unit = {
      val bytes = value.getBytes
      if (bytes == null) {
        record.add(name, null)
        return
      }
      try {
        val buffer = UTF8_DECODER.decode(value.toByteBuffer)
        record.add(name, buffer.toString)
      } catch {
        case _: Throwable => Unit
      }
    }

    override def addBoolean(value: Boolean) {
      record.add(name, value)
    }

    override def addDouble(value: Double) {
      record.add(name, value)
    }

    override def addFloat(value: Float) {
      record.add(name, value)
    }

    override def addInt(value: Int) {
      record.add(name, value)
    }

    override def addLong(value: Long) {
      record.add(name, value)
    }
  }
}
Example 14
Source File: Json4sSerializer.scala, from reliable-http-client (Apache License 2.0)
package rhttpc.akkapersistence.json4s

import java.nio.ByteBuffer
import java.nio.charset.Charset

import akka.actor.ExtendedActorSystem
import akka.serialization.Serializer
import org.json4s.native.Serialization._
import org.json4s.{DefaultFormats, Formats, TypeHints}
import rhttpc.transport.json4s.{AllTypeHints, ObjectSerializer}

class Json4sSerializer(system: ExtendedActorSystem) extends Serializer {
  import Json4sSerializer._
  import rhttpc.transport.json4s.CommonFormats._

  override def identifier: Int = ID

  override def includeManifest: Boolean = true

  override def fromBinary(bytes: Array[Byte], manifestOpt: Option[Class[_]]): AnyRef = {
    implicit val manifest = manifestOpt match {
      case Some(x) => Manifest.classType(x)
      case None => Manifest.AnyRef
    }
    read(new String(bytes, UTF8))
  }

  override def toBinary(o: AnyRef): Array[Byte] = {
    writePretty(o).getBytes(UTF8)
  }
}

object Json4sSerializer {
  private val UTF8: Charset = Charset.forName("UTF-8")
  private val ID: Int = ByteBuffer.wrap("json4s".getBytes(UTF8)).getInt
}
Example 15
Source File: StreamHandler.scala, from scala-js-java-logging (BSD 3-Clause "New" or "Revised" License)
package java.util.logging

import java.io.OutputStream
import java.nio.charset.Charset

class StreamHandler(private[this] var out: OutputStream,
    private[this] val formatter: Formatter) extends Handler {

  // Defaults defined on javadocs
  setLevel(Level.INFO)
  setFilter(null)

  if (formatter == null) setFormatter(new SimpleFormatter())
  else setFormatter(formatter)

  // Required by javadoc but it is unspecified what to do if formatter is null
  def this() = this(null, null)

  private[this] var headWritten: Boolean = false

  private def encodingOrDefault: Charset =
    if (getEncoding == null) Charset.defaultCharset()
    else Charset.forName(getEncoding)

  protected def setOutputStream(out: OutputStream): Unit = {
    // Required by javadocs
    if (out != null && formatter != null) {
      out.write(formatter.getTail(this).getBytes(encodingOrDefault))
      flush()
      out.close()
    }
    this.headWritten = false
    this.out = out
  }

  // Mentioned as part of StreamHandler javadocs but it doesn't specify behavior
  override def setEncoding(encoding: String): Unit = super.setEncoding(encoding)

  private def write(c: Formatter => String): Unit = {
    // The javadocs don't specify what to do if the formatter is null
    if (out != null && formatter != null) {
      out.write(c(formatter).getBytes(encodingOrDefault))
      flush()
    }
  }

  private[logging] def writeHeader(): Unit = {
    if (!headWritten) {
      write(_.getHead(this))
      headWritten = true
    }
  }

  private[logging] def writeTail(): Unit = write(_.getTail(this))

  override def publish(record: LogRecord): Unit = {
    writeHeader()
    // The javadocs don't specify what to do if the formatter is null
    if (out != null && formatter != null && isLoggable(record)) {
      out.write(formatter.format(record).getBytes(encodingOrDefault))
      flush()
    }
  }

  override def isLoggable(record: LogRecord): Boolean =
    out != null && record != null && super.isLoggable(record)

  override def flush(): Unit = if (out != null) out.flush()

  override def close(): Unit = {
    if (out != null) {
      // Required by javadocs
      writeHeader()
      writeTail()
      flush()
      out.close()
    }
  }
}
Example 16
Source File: SequenceSupport.scala, from eel-sdk (Apache License 2.0)
package io.eels.component.sequence

import java.io.StringReader
import java.nio.charset.Charset

import com.sksamuel.exts.Logging
import com.sksamuel.exts.io.Using
import io.eels.component.csv.{CsvFormat, CsvSupport}
import io.eels.schema.{Field, StructType}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.{BytesWritable, IntWritable, SequenceFile}

object SequenceSupport extends Logging with Using {

  def createReader(path: Path)(implicit conf: Configuration): SequenceFile.Reader =
    new SequenceFile.Reader(conf, SequenceFile.Reader.file(path))

  def toValues(v: BytesWritable): Array[String] =
    toValues(new String(v.copyBytes(), Charset.forName("UTF8")))

  def toValues(str: String): Array[String] = {
    val parser = CsvSupport.createParser(CsvFormat(), false, false, false, null, null)
    parser.beginParsing(new StringReader(str))
    val record = parser.parseNext()
    parser.stopParsing()
    record
  }

  def schema(path: Path)(implicit conf: Configuration): StructType = {
    logger.debug(s"Fetching sequence schema for $path")
    using(createReader(path)) { it =>
      val k = new IntWritable()
      val v = new BytesWritable()
      val fields: Array[Field] = {
        it.next(k, v)
        toValues(v).map { it => new Field(it) }
      }
      StructType(fields.toList)
    }
  }
}
Example 17
Source File: io.scala, from sbt-org-policies (Apache License 2.0)
package sbtorgpolicies

import java.io._
import java.net.URL
import java.nio.charset.Charset
import java.nio.file.Path
import java.nio.file.Paths.get

import cats.syntax.either._
import sbtorgpolicies.exceptions.IOException

import scala.io.Source
import scala.language.implicitConversions

package object io {

  type IOResult[T] = Either[IOException, T]

  object syntax {

    implicit def eitherFilterSyntax[T](either: Either[Throwable, T]): FilteredEitherOps[T] =
      new FilteredEitherOps(either)

    implicit def fileNameSyntax(fileName: String): FileNameOps = new FileNameOps(fileName)

    final class FilteredEitherOps[T](either: Either[Throwable, T]) {

      def withFilter(f: T => Boolean): Either[Throwable, T] = either match {
        case Right(r) if !f(r) =>
          new IllegalStateException("Filter condition has not been satisfied").asLeft[T]
        case _ =>
          either
      }
    }

    final class FileNameOps(filename: String) {

      def toPath: Path = get(filename)

      def toFile: File = new File(filename.fixPath)

      def fixPath: String = filename.replaceAll("/", File.separator)

      def ensureFinalSlash: String =
        filename + (if (filename.endsWith(File.separator)) "" else File.separator)
    }
  }

  object IO {

    def file(path: String): File = new File(path)

    def url(address: String): URL = new URL(address)

    def readLines(file: File): Iterator[String] = Source.fromFile(file).getLines()

    def readBytes(file: File): Array[Byte] = {
      val is: InputStream = new FileInputStream(file)
      val array: Array[Byte] = Stream.continually(is.read).takeWhile(_ != -1).map(_.toByte).toArray
      is.close()
      array
    }

    def write(file: File, content: String, charset: Charset = Charset.forName("UTF-8")): Unit = {
      val writer = new BufferedWriter(
        new OutputStreamWriter(new FileOutputStream(file, false), charset)
      )
      writer.write(content)
      writer.close()
    }

    def relativize(base: File, file: File): Option[String] = {

      def ensureEndingSlash: Option[String] = {
        val path = base.getAbsolutePath
        path.lastOption.map {
          case c if c == File.separatorChar => path
          case _ => path + File.separatorChar
        }
      }

      val baseFileString = if (base.isDirectory) ensureEndingSlash else None
      val pathString = file.getAbsolutePath
      baseFileString flatMap {
        case baseString if pathString.startsWith(baseString) =>
          Some(pathString.substring(baseString.length))
        case _ => None
      }
    }
  }
}
Example 18
Source File: StoreOpsTest.scala, from fs2-blobstore (Apache License 2.0)
package blobstore

import java.nio.charset.Charset
import java.nio.file.Files
import java.util.concurrent.Executors

import cats.effect.{Blocker, IO}
import cats.effect.laws.util.TestInstances
import cats.implicits._
import fs2.Pipe
import org.scalatest.Assertion
import org.scalatest.flatspec.AnyFlatSpec
import implicits._
import org.scalatest.matchers.must.Matchers

import scala.collection.mutable.ArrayBuffer
import scala.concurrent.ExecutionContext

class StoreOpsTest extends AnyFlatSpec with Matchers with TestInstances {

  implicit val cs = IO.contextShift(ExecutionContext.global)
  val blocker = Blocker.liftExecutionContext(ExecutionContext.fromExecutor(Executors.newCachedThreadPool))

  behavior of "PutOps"

  it should "buffer contents and compute size before calling Store.put" in {
    val bytes: Array[Byte] = "AAAAAAAAAA".getBytes(Charset.forName("utf-8"))
    val store = DummyStore(_.size must be(Some(bytes.length)))

    fs2.Stream.emits(bytes).covary[IO].through(store.bufferedPut(Path("path/to/file.txt"), blocker)).compile.drain.unsafeRunSync()
    store.buf.toArray must be(bytes)
  }

  it should "upload a file from a nio Path" in {
    val bytes = "hello".getBytes(Charset.forName("utf-8"))
    val store = DummyStore(_.size must be(Some(bytes.length)))

    fs2.Stream.bracket(IO(Files.createTempFile("test-file", ".bin"))) { p =>
      IO(p.toFile.delete).void
    }.flatMap { p =>
      fs2.Stream.emits(bytes).covary[IO].through(fs2.io.file.writeAll(p, blocker)).drain ++
        fs2.Stream.eval(store.put(p, Path("path/to/file.txt"), blocker))
    }.compile.drain.unsafeRunSync()
    store.buf.toArray must be(bytes)
  }
}

final case class DummyStore(check: Path => Assertion) extends Store[IO] {
  val buf = new ArrayBuffer[Byte]()

  override def put(path: Path): Pipe[IO, Byte, Unit] = {
    check(path)
    in => {
      buf.appendAll(in.compile.toVector.unsafeRunSync())
      fs2.Stream.emit(())
    }
  }

  override def list(path: Path): fs2.Stream[IO, Path] = ???
  override def get(path: Path, chunkSize: Int): fs2.Stream[IO, Byte] = ???
  override def move(src: Path, dst: Path): IO[Unit] = ???
  override def copy(src: Path, dst: Path): IO[Unit] = ???
  override def remove(path: Path): IO[Unit] = ???
}
Example 19
Source File: PrefixIndexTestSuite.scala, from tispark (Apache License 2.0)
package org.apache.spark.sql.expression.index

import java.nio.charset.Charset

import org.apache.spark.sql.BaseTiSparkTest
import org.apache.spark.sql.catalyst.util.resourceToString

class PrefixIndexTestSuite extends BaseTiSparkTest {

  // https://github.com/pingcap/tispark/issues/272
  test("Prefix index read does not work correctly") {
    tidbStmt.execute(
      resourceToString(
        s"prefix-index/PrefixTest.sql",
        classLoader = Thread.currentThread().getContextClassLoader))
    refreshConnections()
    // add explain to show if we have actually used prefix index in plan
    explainAndRunTest("select a, b from prefix where b < \"bbc\"")
    explainAndRunTest("select a, b from prefix where a = 1 and b = \"bbb\"")
    explainAndRunTest("select b from prefix where b = \"bbc\"")
    explainAndRunTest("select b from prefix where b != \"bbc\"")
    explainAndRunTest("select * from prefix where b = 'b'")
    explainAndRunTest("select b from prefix where b >= \"bbc\" and b < \"bbd\"")
    // FIXME: following test results in INDEX range [bb, bb] and TABLE range (-INF, bbc),
    // while the table range should have been [bb, bb]
    // FYI, the predicate is [[b] LESS_THAN "bbc"], Not(IsNull([b])), [[b] EQUAL "bb"]
    explainAndRunTest("select c, b from prefix where b = \"bb\" and b < \"bbc\"")
    println(Charset.defaultCharset())
    explainAndRunTest(
      "select c, b from prefix where b > \"ÿ\" and b < \"ÿÿc\"",
      skipJDBC = true,
      rTiDB = List(List(8, "ÿÿ"), List(9, "ÿÿ0")))
    // add LIKE tests for prefix index
    explainAndRunTest("select a, b from prefix where b LIKE 'b%'")
    explainAndRunTest("select a, b from prefix where b LIKE 'ab%'")
    explainAndRunTest(
      "select a, b from prefix where b LIKE 'ÿÿ%'",
      skipJDBC = true,
      rTiDB = List(List(7, "ÿÿ"), List(8, "ÿÿ0"), List(9, "ÿÿÿ")))
    explainAndRunTest("select a, b from prefix where b LIKE 'b%b'")
    explainAndRunTest("select a, b from prefix where b LIKE 'ÿ%'", skipJDBC = true)
    explainAndRunTest("select a, b from prefix where b LIKE '%b'")
    explainAndRunTest("select a, b from prefix where b LIKE '%'")
  }

  // https://github.com/pingcap/tispark/issues/397
  test("Prefix index implementation for utf8 string is incorrect") {
    tidbStmt.execute(
      resourceToString(
        s"prefix-index/UTF8Test.sql",
        classLoader = Thread.currentThread().getContextClassLoader))
    refreshConnections()
    spark.sql("select * from t1").show
    explainAndRunTest("select * from t1 where name = '中文字符集_测试'", skipJDBC = true)
    explainAndRunTest("select * from t1 where name < '中文字符集_测试'", skipJDBC = true)
    explainAndRunTest("select * from t1 where name > '中文字符集_测试'", skipJDBC = true)
  }

  test("index double scan with predicate") {
    tidbStmt.execute("drop table if exists test_index")
    tidbStmt.execute(
      "create table test_index(id bigint(20), c1 text default null, c2 int, c3 int, c4 int, KEY idx_c1(c1(10)))")
    tidbStmt.execute("insert into test_index values(1, 'aairy', 10, 20, 30)")
    tidbStmt.execute("insert into test_index values(2, 'dairy', 20, 30, 40)")
    tidbStmt.execute("insert into test_index values(3, 'zairy', 30, 40, 50)")
    refreshConnections() // refresh since we need to load data again
    explainAndRunTest("select c1, c2 from test_index where c1 < 'dairy' and c2 > 20")
    explainAndRunTest("select c1, c2 from test_index where c1 = 'dairy'")
    explainAndRunTest("select c1, c2 from test_index where c1 > 'dairy'")
    explainAndRunTest("select c2 from test_index where c1 < 'dairy'")
    explainAndRunTest("select c2 from test_index where c1 = 'dairy'")
    explainAndRunTest("select c2, c2 from test_index where c1 > 'dairy'")
    explainAndRunTest("select c2, c2 from test_index where c1 < 'dairy'")
    explainAndRunTest("select c2, c2 from test_index where c1 = 'dairy'")
    explainAndRunTest("select max(c2) from test_index where c1 > 'dairy'")
    explainAndRunTest("select max(c2) from test_index where c1 < 'dairy'")
    explainAndRunTest("select max(c2) from test_index where c1 = 'dairy'")
  }

  override def afterAll(): Unit =
    try {
      tidbStmt.execute("DROP TABLE IF EXISTS `prefix`")
      tidbStmt.execute("DROP TABLE IF EXISTS `t1`")
      tidbStmt.execute("DROP TABLE IF EXISTS `test_index`")
    } finally {
      super.afterAll()
    }
}
Example 20
Source File: package.scala, from featherbed (Apache License 2.0)
package featherbed

import java.nio.charset.Charset

import cats.data.ValidatedNel
import cats.implicits._
import io.circe._
import io.circe.generic.auto._
import io.circe.parser._
import io.circe.syntax._
import shapeless.Witness

package object circe {

  private val printer = Printer.noSpaces.copy(dropNullValues = true)

  implicit def circeEncoder[A: Encoder]: content.Encoder[A, Witness.`"application/json"`.T] =
    content.Encoder.of("application/json") { (value: A, charset: Charset) =>
      content.Encoder.encodeString(printer.pretty(value.asJson), charset)
    }

  implicit def circeDecoder[A: Decoder]: content.Decoder.Aux[Witness.`"application/json"`.T, A] =
    content.Decoder.of("application/json") { response =>
      content.Decoder.decodeString(response).andThen { str =>
        (parse(str).toValidated.toValidatedNel: ValidatedNel[Throwable, Json]).andThen { json: Json =>
          json.as[A].toValidated.toValidatedNel: ValidatedNel[Throwable, A]
        }
      }
    }
}
Example 21
Source File: package.scala, from featherbed (Apache License 2.0)
package featherbed

import java.nio.CharBuffer
import java.nio.charset.{Charset, CodingErrorAction}
import scala.util.Try

import cats.data.{Validated, ValidatedNel}
import com.twitter.finagle.http.Response
import com.twitter.io.Buf
import shapeless.Witness
import sun.nio.cs.ThreadLocalCoders

package object content {

  type ContentType = String

  trait Decoder[ContentType] {
    type Out
    val contentType: String // widened version of ContentType
    def apply(buf: Response): ValidatedNel[Throwable, Out]
  }

  object Decoder extends LowPriorityDecoders {
    type Aux[CT, A1] = Decoder[CT] { type Out = A1 }

    def of[T <: ContentType, A1](t: T)(fn: Response => ValidatedNel[Throwable, A1]): Decoder.Aux[t.type, A1] =
      new Decoder[t.type] {
        type Out = A1
        val contentType = t
        def apply(response: Response) = fn(response)
      }

    def decodeString(response: Response): ValidatedNel[Throwable, String] = {
      Validated.fromTry(Try {
        response.charset.map(Charset.forName).getOrElse(Charset.defaultCharset)
      }).andThen { charset: Charset =>
        val decoder = ThreadLocalCoders.decoderFor(charset)
        Validated.fromTry(
          Try(
            decoder
              .onMalformedInput(CodingErrorAction.REPORT)
              .onUnmappableCharacter(CodingErrorAction.REPORT)
              .decode(Buf.ByteBuffer.Owned.extract(response.content).asReadOnlyBuffer()))).map[String](_.toString)
      }.toValidatedNel
    }
  }

  private[featherbed] trait LowPriorityDecoders {
    implicit val plainTextDecoder: Decoder.Aux[Witness.`"text/plain"`.T, String] =
      Decoder.of("text/plain") { response =>
        Decoder.decodeString(response)
      }

    implicit val anyResponseDecoder: Decoder.Aux[Witness.`"*/*"`.T, Response] =
      Decoder.of("*/*") { response =>
        Validated.Valid(response)
      }
  }

  trait Encoder[A, ForContentType] {
    def apply(value: A, charset: Charset): ValidatedNel[Throwable, Buf]
  }

  object Encoder extends LowPriorityEncoders {
    def of[A, T <: ContentType](t: T)(fn: (A, Charset) => ValidatedNel[Throwable, Buf]): Encoder[A, t.type] =
      new Encoder[A, t.type] {
        def apply(value: A, charset: Charset) = fn(value, charset)
      }

    def encodeString(value: String, charset: Charset): ValidatedNel[Throwable, Buf] = {
      val encoder = ThreadLocalCoders.encoderFor(charset)
      Validated.fromTry(Try(encoder
        .onMalformedInput(CodingErrorAction.REPORT)
        .onUnmappableCharacter(CodingErrorAction.REPORT)
        .encode(CharBuffer.wrap(value)))).toValidatedNel.map[Buf](Buf.ByteBuffer.Owned(_))
    }
  }

  private[featherbed] trait LowPriorityEncoders {
    implicit val plainTextEncoder: Encoder[String, Witness.`"text/plain"`.T] =
      Encoder.of("text/plain") { case (value, charset) =>
        Encoder.encodeString(value, charset)
      }
  }
}
Example 22
Source File: XmlFile.scala, from spark-xml (Apache License 2.0)
package com.databricks.spark.xml.util

import java.io.CharArrayWriter
import java.nio.charset.Charset
import javax.xml.stream.XMLOutputFactory

import scala.collection.Map

import com.databricks.spark.xml.parsers.StaxXmlGenerator
import com.sun.xml.txw2.output.IndentingXMLStreamWriter
import org.apache.hadoop.io.{Text, LongWritable}
import org.apache.spark.rdd.RDD
import org.apache.spark.SparkContext
import org.apache.spark.sql.DataFrame
import com.databricks.spark.xml.{XmlOptions, XmlInputFormat}

private[xml] object XmlFile {
  val DEFAULT_INDENT = " "

  def withCharset(
      context: SparkContext,
      location: String,
      charset: String,
      rowTag: String): RDD[String] = {
    // This just checks the charset's validity early, to keep behavior
    Charset.forName(charset)
    context.hadoopConfiguration.set(XmlInputFormat.START_TAG_KEY, s"<$rowTag>")
    context.hadoopConfiguration.set(XmlInputFormat.END_TAG_KEY, s"</$rowTag>")
    context.hadoopConfiguration.set(XmlInputFormat.ENCODING_KEY, charset)
    context.newAPIHadoopFile(location,
      classOf[XmlInputFormat],
      classOf[LongWritable],
      classOf[Text]).map { case (_, text) =>
        new String(text.getBytes, 0, text.getLength, charset)
      }
  }

  def saveAsXmlFile(
      dataFrame: DataFrame,
      path: String,
      parameters: Map[String, String] = Map()): Unit = {
    val options = XmlOptions(parameters.toMap)
    val codecClass = CompressionCodecs.getCodecClass(options.codec)
    val rowSchema = dataFrame.schema
    val indent = XmlFile.DEFAULT_INDENT

    val xmlRDD = dataFrame.rdd.mapPartitions { iter =>
      val factory = XMLOutputFactory.newInstance()
      val writer = new CharArrayWriter()
      val xmlWriter = factory.createXMLStreamWriter(writer)
      val indentingXmlWriter = new IndentingXMLStreamWriter(xmlWriter)
      indentingXmlWriter.setIndentStep(indent)

      new Iterator[String] {
        var firstRow: Boolean = true
        var lastRow: Boolean = true

        override def hasNext: Boolean = iter.hasNext || firstRow || lastRow

        override def next: String = {
          if (iter.nonEmpty) {
            if (firstRow) {
              indentingXmlWriter.writeStartElement(options.rootTag)
              firstRow = false
            }
            val xml = {
              StaxXmlGenerator(
                rowSchema,
                indentingXmlWriter,
                options)(iter.next())
              indentingXmlWriter.flush()
              writer.toString
            }
            writer.reset()
            xml
          } else {
            if (!firstRow) {
              lastRow = false
              indentingXmlWriter.writeEndElement()
              indentingXmlWriter.close()
              writer.toString
            } else {
              // This means the iterator was initially empty.
              firstRow = false
              lastRow = false
              ""
            }
          }
        }
      }
    }

    codecClass match {
      case null => xmlRDD.saveAsTextFile(path)
      case codec => xmlRDD.saveAsTextFile(path, codec)
    }
  }
}
Example 23
Source File: HttpRequestSerializer.scala, from rokku (Apache License 2.0)
package com.ing.wbaa.rokku.proxy.persistence.serializers

import java.nio.charset.Charset

import akka.http.scaladsl.model.{ HttpEntity, HttpRequest, RemoteAddress }
import akka.serialization.SerializerWithStringManifest
import com.ing.wbaa.rokku.proxy.data.{ User, UserRawJson }
import com.ing.wbaa.rokku.proxy.persistence.{ CurrentRequestsState, ExecutedRequestEvt }
import spray.json._

class HttpRequestSerializer extends SerializerWithStringManifest with HttpRequestConversionSupport {
  override def identifier: Int = 197642

  val Utf8 = Charset.forName("UTF-8")
  val HttpRequestManifest = classOf[ExecutedRequestEvt].getName
  val HttpRequestsManifest = classOf[CurrentRequestsState].getName

  def simplifiedHttpRequestString(e: HttpRequest) =
    SimplifiedHttpRequest(
      e.method.value,
      e.uri.toString(),
      convertAkkaHeadersToStrings(e.headers),
      HttpEntity.Empty.withContentType(e.entity.contentType).toString(),
      e.protocol.value
    ).toJson.toString

  def userSTSString(u: User) =
    UserRawJson(
      u.userName.value,
      Option(u.userGroups.map(g => g.value)),
      u.accessKey.value,
      u.secretKey.value,
      Option(u.userRole.value)).toJson.toString

  def remoteIPString(a: RemoteAddress) = SimplifiedRemoteAddress(a.value).toJson.toString()

  def toExecutedRequestEvt(r: String) = {
    val Array(hr, u, ip) = r.split("[|]")
    val httpRequest = toAkkaHttpRequest(hr.parseJson.convertTo[SimplifiedHttpRequest])
    val userSTS = User(u.parseJson.convertTo[UserRawJson])
    val simplifiedRemoteAddress = ip.parseJson.convertTo[SimplifiedRemoteAddress]
    ExecutedRequestEvt(httpRequest, userSTS, simplifiedRemoteAddress.toRemoteAddr)
  }

  override def manifest(o: AnyRef): String = o.getClass.getName

  override def toBinary(o: AnyRef): Array[Byte] = o match {
    case r: ExecutedRequestEvt =>
      s"${simplifiedHttpRequestString(r.httpRequest)}|${userSTSString(r.userSTS)}|${remoteIPString(r.clientIPAddress)}".getBytes(Utf8)
    case c: CurrentRequestsState =>
      c.requests.map { re =>
        s"${simplifiedHttpRequestString(re.httpRequest)}|${userSTSString(re.userSTS)}|${remoteIPString(re.clientIPAddress)}"
      }.mkString("|-").getBytes(Utf8)
    case e: IllegalArgumentException =>
      throw new IllegalArgumentException(s"Unable to serialize to bytes, class: ${o.getClass} ${e.getMessage}")
  }

  override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = {
    manifest match {
      case s: String if s == HttpRequestManifest =>
        val storedRequest = new String(bytes, Utf8)
        toExecutedRequestEvt(storedRequest)
      case s: String if s == HttpRequestsManifest =>
        val storedRequests = new String(bytes, Utf8)
        val requestsList: List[String] = storedRequests.split("[|-]{2}").toList
        CurrentRequestsState(requestsList.map(toExecutedRequestEvt))
      case _ =>
        throw new IllegalArgumentException(s"Unable to de-serialize from bytes for manifest: $manifest")
    }
  }
}
Example 24
Source File: AhcWSUtils.scala, from play-ws (Apache License 2.0)
package play.api.libs.ws.ahc

import play.shaded.ahc.org.asynchttpclient.util.HttpUtils

import java.nio.charset.Charset
import java.nio.charset.StandardCharsets

private[ws] object AhcWSUtils {

  def getResponseBody(ahcResponse: play.shaded.ahc.org.asynchttpclient.Response): String = {
    val contentType = Option(ahcResponse.getContentType).getOrElse("application/octet-stream")
    val charset = getCharset(contentType)
    ahcResponse.getResponseBody(charset)
  }

  def getCharset(contentType: String): Charset = {
    Option(HttpUtils.extractContentTypeCharsetAttribute(contentType)).getOrElse {
      if (contentType.startsWith("text/")) StandardCharsets.ISO_8859_1
      else StandardCharsets.UTF_8
    }
  }
}
Example 25
Source File: JsonBodyReadablesSpec.scala, from play-ws (Apache License 2.0)
package play.api.libs.ws

import java.nio.charset.Charset
import java.nio.charset.StandardCharsets._

import akka.stream.scaladsl.Source
import akka.util.ByteString
import org.specs2.matcher.MustMatchers
import org.specs2.mutable.Specification
import play.api.libs.json.JsSuccess
import play.api.libs.json.JsValue

class JsonBodyReadablesSpec extends Specification with MustMatchers {

  class StubResponse(byteArray: Array[Byte], charset: Charset = UTF_8) extends StandaloneWSResponse {
    override def uri: java.net.URI = ???

    override def headers: Map[String, Seq[String]] = ???

    override def underlying[T]: T = ???

    override def status: Int = ???

    override def statusText: String = ???

    override def cookies: Seq[WSCookie] = ???

    override def cookie(name: String): Option[WSCookie] = ???

    override def body: String = new String(byteArray, charset)

    override def bodyAsBytes: ByteString = ByteString.fromArray(byteArray)

    override def bodyAsSource: Source[ByteString, _] = ???
  }

  "decode encodings correctly" should {

    "read an encoding of UTF-32BE" in {
      val readables = new JsonBodyReadables() {}
      val json = """{"menu": {"id": "file", "value": "File"} }"""
      val charsetName = "UTF-32BE"
      val value: JsValue =
        readables.readableAsJson.transform(new StubResponse(json.getBytes(charsetName), Charset.forName(charsetName)))
      (value \ "menu" \ "id").validate[String] must beEqualTo(JsSuccess("file"))
    }

    "read an encoding of UTF-32LE" in {
      val readables = new JsonBodyReadables() {}
      val json = """{"menu": {"id": "file", "value": "File"} }"""
      val charsetName = "UTF-32LE"
      val value: JsValue =
        readables.readableAsJson.transform(new StubResponse(json.getBytes(charsetName), Charset.forName(charsetName)))
      (value \ "menu" \ "id").validate[String] must beEqualTo(JsSuccess("file"))
    }

    "read an encoding of UTF-16BE" in {
      val readables = new JsonBodyReadables() {}
      val json = """{"menu": {"id": "file", "value": "File"} }"""
      val charset = UTF_16BE
      val value: JsValue =
        readables.readableAsJson.transform(new StubResponse(json.getBytes(charset), charset))
      (value \ "menu" \ "id").validate[String] must beEqualTo(JsSuccess("file"))
    }

    "read an encoding of UTF-16LE" in {
      val readables = new JsonBodyReadables() {}
      val json = """{"menu": {"id": "file", "value": "File"} }"""
      val charset = UTF_16LE
      val value: JsValue =
        readables.readableAsJson.transform(new StubResponse(json.getBytes(charset), charset))
      (value \ "menu" \ "id").validate[String] must beEqualTo(JsSuccess("file"))
    }

    "read an encoding of UTF-8" in {
      val readables = new JsonBodyReadables() {}
      val json = """{"menu": {"id": "file", "value": "File"} }"""
      val value: JsValue = readables.readableAsJson.transform(new StubResponse(json.getBytes(UTF_8)))
      (value \ "menu" \ "id").validate[String] must beEqualTo(JsSuccess("file"))
    }

    "read an encoding of UTF-8 with empty object" in {
      val readables = new JsonBodyReadables() {}
      val json = "{}"
      val value: JsValue = readables.readableAsJson.transform(new StubResponse(json.getBytes(UTF_8)))
      value.toString() must beEqualTo("{}")
    }

    "read an encoding of UTF-8 with empty array" in {
      val readables = new JsonBodyReadables() {}
      val json = "[]"
      val value: JsValue = readables.readableAsJson.transform(new StubResponse(json.getBytes(UTF_8)))
      value.toString() must beEqualTo("[]")
    }
  }
}
Example 26
Source File: passwordhashers.scala From tsec with MIT License | 5 votes |
package tsec.passwordhashers

import java.nio.charset.Charset

import tsec.common.TSecError
import tsec.libsodium.ScalaSodium

package object libsodium {

  final case class SodiumPasswordError(cause: String) extends TSecError

  private[passwordhashers] val asciiEncoder = Charset.forName("US-ASCII").newEncoder()

  final class PWStrengthParam[PTyp, Str](val opLimit: Int, val memLimit: Int)

  object PasswordStrength {
    object MinStrength
    object InteractiveStrength
    object ModerateStrength
    object SensitiveStrength
  }

  type MinStrength         = PasswordStrength.MinStrength.type
  type InteractiveStrength = PasswordStrength.InteractiveStrength.type
  type ModerateStrength    = PasswordStrength.ModerateStrength.type
  type SensitiveStrength   = PasswordStrength.SensitiveStrength.type

  implicit val argonMinstr: PWStrengthParam[Argon2, MinStrength] =
    new PWStrengthParam[Argon2, MinStrength](
      ScalaSodium.crypto_pwhash_argon2id_OPSLIMIT_MIN,
      ScalaSodium.crypto_pwhash_argon2id_MEMLIMIT_MIN
    )

  implicit val argonInteractiveStr: PWStrengthParam[Argon2, InteractiveStrength] =
    new PWStrengthParam[Argon2, InteractiveStrength](
      ScalaSodium.crypto_pwhash_argon2id_OPSLIMIT_INTERACTIVE,
      ScalaSodium.crypto_pwhash_argon2id_MEMLIMIT_INTERACTIVE
    )

  implicit val argonModerateStr: PWStrengthParam[Argon2, ModerateStrength] =
    new PWStrengthParam[Argon2, ModerateStrength](
      ScalaSodium.crypto_pwhash_argon2id_OPSLIMIT_MODERATE,
      ScalaSodium.crypto_pwhash_argon2id_MEMLIMIT_MODERATE
    )

  implicit val argonSensitiveStr: PWStrengthParam[Argon2, SensitiveStrength] =
    new PWStrengthParam[Argon2, SensitiveStrength](
      ScalaSodium.crypto_pwhash_argon2id_OPSLIMIT_SENSITIVE,
      ScalaSodium.crypto_pwhash_argon2id_MEMLIMIT_SENSITIVE
    )

  implicit val SodiumSCryptMinstr: PWStrengthParam[SodiumSCrypt, MinStrength] =
    new PWStrengthParam[SodiumSCrypt, MinStrength](
      ScalaSodium.crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_MIN,
      ScalaSodium.crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_MIN
    )

  implicit val SodiumSCryptInteractiveStr: PWStrengthParam[SodiumSCrypt, InteractiveStrength] =
    new PWStrengthParam[SodiumSCrypt, InteractiveStrength](
      ScalaSodium.crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_INTERACTIVE,
      ScalaSodium.crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_INTERACTIVE
    )

  implicit val SodiumSCryptSensitiveStr: PWStrengthParam[SodiumSCrypt, SensitiveStrength] =
    new PWStrengthParam[SodiumSCrypt, SensitiveStrength](
      ScalaSodium.crypto_pwhash_scryptsalsa208sha256_OPSLIMIT_SENSITIVE,
      ScalaSodium.crypto_pwhash_scryptsalsa208sha256_MEMLIMIT_SENSITIVE
    )
}
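Each PWStrengthParam instance ties an algorithm tag to a strength marker so the op/mem limits resolve implicitly at the call site. A minimal resolution sketch, assuming it runs inside this package (limitsFor is a hypothetical helper, not part of the library):

def limitsFor[A, S](implicit p: PWStrengthParam[A, S]): (Int, Int) = (p.opLimit, p.memLimit)

val (ops, mem) = limitsFor[Argon2, InteractiveStrength] // resolves argonInteractiveStr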
Example 27
Source File: CreateJacksonParser.scala From XSQL with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.catalyst.json

import java.io.{ByteArrayInputStream, InputStream, InputStreamReader}
import java.nio.channels.Channels
import java.nio.charset.Charset

import com.fasterxml.jackson.core.{JsonFactory, JsonParser}
import org.apache.hadoop.io.Text
import sun.nio.cs.StreamDecoder

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.unsafe.types.UTF8String

private[sql] object CreateJacksonParser extends Serializable {

  def string(jsonFactory: JsonFactory, record: String): JsonParser = {
    jsonFactory.createParser(record)
  }

  def utf8String(jsonFactory: JsonFactory, record: UTF8String): JsonParser = {
    val bb = record.getByteBuffer
    assert(bb.hasArray)

    val bain = new ByteArrayInputStream(
      bb.array(), bb.arrayOffset() + bb.position(), bb.remaining())

    jsonFactory.createParser(new InputStreamReader(bain, "UTF-8"))
  }

  def text(jsonFactory: JsonFactory, record: Text): JsonParser = {
    jsonFactory.createParser(record.getBytes, 0, record.getLength)
  }

  // Jackson parsers can be ranked according to their performance:
  // 1. Array based with actual encoding UTF-8 in the array. This is the fastest parser
  //    but it doesn't allow to set encoding explicitly. Actual encoding is detected automatically
  //    by checking leading bytes of the array.
  // 2. InputStream based with actual encoding UTF-8 in the stream. Encoding is detected
  //    automatically by analyzing first bytes of the input stream.
  // 3. Reader based parser. This is the slowest parser used here but it allows to create
  //    a reader with specific encoding.
  // The method creates a reader for an array with given encoding and sets size of internal
  // decoding buffer according to size of input array.
  private def getStreamDecoder(enc: String, in: Array[Byte], length: Int): StreamDecoder = {
    val bais = new ByteArrayInputStream(in, 0, length)
    val byteChannel = Channels.newChannel(bais)
    val decodingBufferSize = Math.min(length, 8192)
    val decoder = Charset.forName(enc).newDecoder()

    StreamDecoder.forDecoder(byteChannel, decoder, decodingBufferSize)
  }

  def text(enc: String, jsonFactory: JsonFactory, record: Text): JsonParser = {
    val sd = getStreamDecoder(enc, record.getBytes, record.getLength)
    jsonFactory.createParser(sd)
  }

  def inputStream(jsonFactory: JsonFactory, is: InputStream): JsonParser = {
    jsonFactory.createParser(is)
  }

  def inputStream(enc: String, jsonFactory: JsonFactory, is: InputStream): JsonParser = {
    jsonFactory.createParser(new InputStreamReader(is, enc))
  }

  def internalRow(jsonFactory: JsonFactory, row: InternalRow): JsonParser = {
    val ba = row.getBinary(0)
    jsonFactory.createParser(ba, 0, ba.length)
  }

  def internalRow(enc: String, jsonFactory: JsonFactory, row: InternalRow): JsonParser = {
    val binary = row.getBinary(0)
    val sd = getStreamDecoder(enc, binary, binary.length)
    jsonFactory.createParser(sd)
  }
}
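The two text overloads show the trade-off described in the ranking comment: without an explicit encoding, Jackson auto-detects it from the leading bytes (the fast array-based path); passing an encoding forces the slower Reader-based path through getStreamDecoder. A minimal sketch, assuming it runs where the private[sql] object is visible:

val factory = new JsonFactory()
val record = new Text("""{"a": 1}""") // Hadoop Text stores UTF-8 bytes

val autoDetected = CreateJacksonParser.text(factory, record)      // array-based, encoding detected
val explicit = CreateJacksonParser.text("UTF-8", factory, record) // Reader-based via StreamDecoder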
Example 28
Source File: CodecStreams.scala From XSQL with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.execution.datasources

import java.io.{InputStream, OutputStream, OutputStreamWriter}
import java.nio.charset.{Charset, StandardCharsets}

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.compress._
import org.apache.hadoop.mapreduce.JobContext
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
import org.apache.hadoop.util.ReflectionUtils

import org.apache.spark.TaskContext

object CodecStreams {

  private def getDecompressionCodec(config: Configuration, file: Path): Option[CompressionCodec] = {
    val compressionCodecs = new CompressionCodecFactory(config)
    Option(compressionCodecs.getCodec(file))
  }

  def createInputStream(config: Configuration, file: Path): InputStream = {
    val fs = file.getFileSystem(config)
    val inputStream: InputStream = fs.open(file)

    getDecompressionCodec(config, file)
      .map(codec => codec.createInputStream(inputStream))
      .getOrElse(inputStream)
  }

  // NOTE: the snippet elides the write-side helpers of the original file. This is a
  // minimal reconstruction of getCompressionCodec (an assumption based on the Hadoop
  // FileOutputFormat API) so that getCompressionExtension below compiles:
  private def getCompressionCodec(context: JobContext): Option[CompressionCodec] = {
    if (FileOutputFormat.getCompressOutput(context)) {
      val codecClass = FileOutputFormat.getOutputCompressorClass(context, classOf[GzipCodec])
      Some(ReflectionUtils.newInstance(codecClass, context.getConfiguration))
    } else {
      None
    }
  }

  def getCompressionExtension(context: JobContext): String = {
    getCompressionCodec(context)
      .map(_.getDefaultExtension)
      .getOrElse("")
  }
}
Example 29
Source File: TextOptions.scala From XSQL with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.execution.datasources.text

import java.nio.charset.{Charset, StandardCharsets}

import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, CompressionCodecs}

// NOTE: the snippet elides the class declaration; reconstructed here (an assumption
// matching the Spark source layout) so the fields below have a home:
private[text] class TextOptions(@transient private val parameters: CaseInsensitiveMap[String])
  extends Serializable {
  import TextOptions._

  val wholeText = parameters.getOrElse(WHOLETEXT, "false").toBoolean

  val encoding: Option[String] = parameters.get(ENCODING)

  val lineSeparator: Option[String] = parameters.get(LINE_SEPARATOR).map { lineSep =>
    require(lineSep.nonEmpty, s"'$LINE_SEPARATOR' cannot be an empty string.")
    lineSep
  }

  // Note that the option 'lineSep' uses a different default value in read and write.
  val lineSeparatorInRead: Option[Array[Byte]] = lineSeparator.map { lineSep =>
    lineSep.getBytes(encoding.map(Charset.forName(_)).getOrElse(StandardCharsets.UTF_8))
  }
  val lineSeparatorInWrite: Array[Byte] =
    lineSeparatorInRead.getOrElse("\n".getBytes(StandardCharsets.UTF_8))
}

private[datasources] object TextOptions {
  val COMPRESSION = "compression"
  val WHOLETEXT = "wholetext"
  val ENCODING = "encoding"
  val LINE_SEPARATOR = "lineSep"
}
Example 30
Source File: NodeIdVectorClockBase64.scala From JustinDB with Apache License 2.0 | 5 votes |
package justin.db.versioning

import java.nio.charset.{Charset, StandardCharsets}
import java.util.Base64

import justin.db.consistenthashing.NodeId
import justin.db.vectorclocks.{Counter, VectorClock}
import spray.json.DefaultJsonProtocol._
import spray.json._

import scala.util.Try

object NodeIdVectorClockBase64 {
  val charset: Charset = StandardCharsets.UTF_8
}

class NodeIdVectorClockBase64 {
  import NodeIdVectorClockBase64._

  def encode(vclock: VectorClock[NodeId]): Try[String] = Try {
    val vcClockBytes = vclock.toList
      .map { case (nodeId, counter) => (nodeId.id.toString, counter.value) }
      .toJson
      .compactPrint
      .getBytes(charset)

    Base64.getEncoder.encodeToString(vcClockBytes)
  }

  def decode(base64: String): Try[VectorClock[NodeId]] = Try {
    val decodedMap = new String(Base64.getDecoder.decode(base64), charset)
      .parseJson.convertTo[List[(String, Int)]]
      .map { case (k, v) => (NodeId(k.toInt), Counter(v)) }
      .toMap

    VectorClock.apply(decodedMap)
  }
}
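encode flattens the clock to (nodeId, counter) string pairs, serializes them as JSON, and Base64-encodes the UTF-8 bytes; decode reverses each step. A minimal round-trip sketch with hypothetical node ids and counters:

val codec = new NodeIdVectorClockBase64
val vclock = VectorClock(Map(NodeId(1) -> Counter(3), NodeId(2) -> Counter(1)))

val encoded: Try[String] = codec.encode(vclock)
val decoded: Try[VectorClock[NodeId]] = encoded.flatMap(codec.decode) // Success(vclock)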
Example 31
Source File: Responses.scala From finagle-postgres with Apache License 2.0 | 5 votes |
package com.twitter.finagle.postgres

import java.nio.charset.Charset

import com.twitter.finagle.postgres.messages.{DataRow, Field}
import com.twitter.finagle.postgres.values.ValueDecoder
import com.twitter.util.Try
import Try._
import com.twitter.concurrent.AsyncStream
import com.twitter.finagle.postgres.PostgresClient.TypeSpecifier
import com.twitter.finagle.postgres.codec.NullValue
import io.netty.buffer.ByteBuf

import scala.language.existentials

// capture all common format data for a set of rows to reduce repeated references
case class RowFormat(
  indexMap: Map[String, Int],
  formats: Array[Short],
  oids: Array[Int],
  dataTypes: Map[Int, TypeSpecifier],
  receives: PartialFunction[String, ValueDecoder[T] forSome { type T }],
  charset: Charset
) {
  @inline final def recv(index: Int) = dataTypes(oids(index)).receiveFunction
  @inline final def defaultDecoder(index: Int) =
    receives.applyOrElse(recv(index), (_: String) => ValueDecoder.never)
}

trait Row {
  def getOption[T](name: String)(implicit decoder: ValueDecoder[T]): Option[T]
  def getOption[T](index: Int)(implicit decoder: ValueDecoder[T]): Option[T]
  def get[T](name: String)(implicit decoder: ValueDecoder[T]): T
  def get[T](index: Int)(implicit decoder: ValueDecoder[T]): T
  def getTry[T](name: String)(implicit decoder: ValueDecoder[T]): Try[T]
  def getTry[T](index: Int)(implicit decoder: ValueDecoder[T]): Try[T]
  def getOrElse[T](name: String, default: => T)(implicit decoder: ValueDecoder[T]): T
  def getOrElse[T](index: Int, default: => T)(implicit decoder: ValueDecoder[T]): T
  def getAnyOption(name: String): Option[Any]
  def getAnyOption(index: Int): Option[Any]
}

object Row {
  // RowImpl (and the ResultSet case class used below) are defined elsewhere in this file.
  def apply(values: Array[Option[ByteBuf]], rowFormat: RowFormat): Row = RowImpl(values, rowFormat)
}

object ResultSet {
  def apply(
    fields: Array[Field],
    charset: Charset,
    dataRows: AsyncStream[DataRow],
    types: Map[Int, TypeSpecifier],
    receives: PartialFunction[String, ValueDecoder[T] forSome { type T }]
  ): ResultSet = {
    val (indexMap, formats, oids) = {
      val l = fields.length
      val stringIndex = new Array[(String, Int)](l)
      val formats = new Array[Short](l)
      val oids = new Array[Int](l)
      var i = 0
      while (i < l) {
        val Field(name, format, dataType) = fields(i)
        stringIndex(i) = (name, i)
        formats(i) = format
        oids(i) = dataType
        i += 1
      }
      (stringIndex.toMap, formats, oids)
    }

    val rowFormat = RowFormat(indexMap, formats, oids, types, receives, charset)

    val rows = dataRows.map { dataRow =>
      Row(
        values = dataRow.data,
        rowFormat = rowFormat
      )
    }

    ResultSet(rows)
  }
}
Example 32
Source File: HStores.scala From finagle-postgres with Apache License 2.0 | 5 votes |
package com.twitter.finagle.postgres.values

import java.nio.charset.Charset

import scala.util.parsing.combinator.RegexParsers

import io.netty.buffer.{ByteBuf, Unpooled}

object HStores {

  object HStoreStringParser extends RegexParsers {
    def key: Parser[String] = "\"" ~ """([^"\\]*(\\.[^"\\]*)*)""".r ~ "\"" ^^ {
      case o ~ value ~ c => value.replace("\\\"", "\"").replace("\\\\", "\\")
    }

    def value = key | "NULL"

    def item: Parser[(String, Option[String])] = key ~ "=>" ~ value ^^ {
      case key ~ arrow ~ "NULL" => (key, None)
      case key ~ arrow ~ value  => (key, Some(value))
    }

    def items: Parser[Map[String, Option[String]]] = repsep(item, ", ") ^^ { l => l.toMap }

    def apply(input: String): Option[Map[String, Option[String]]] = parseAll(items, input) match {
      case Success(result, _) => Some(result)
      case failure: NoSuccess => None
    }
  }

  def parseHStoreString(str: String) = HStoreStringParser(str)

  def formatHStoreString(hstore: Map[String, Option[String]]) = hstore.map {
    case (k, v) =>
      val key = s""""${k.replace("\"", "\\\"")}""""
      val value = v.map(str => s""""${str.replace("\"", "\\\"")}"""").getOrElse("NULL")
      s"""$key => $value"""
  }.mkString(",")

  def decodeHStoreBinary(buf: ByteBuf, charset: Charset) = {
    val count = buf.readInt()
    val pairs = Array.fill(count) {
      val keyLength = buf.readInt()
      val key = Array.fill(keyLength)(buf.readByte())
      val valueLength = buf.readInt()
      val value = valueLength match {
        case -1 => None
        case l =>
          val valueBytes = Array.fill(l)(buf.readByte())
          Some(valueBytes)
      }
      new String(key, charset) -> value.map(new String(_, charset))
    }
    pairs.toMap
  }

  def encodeHStoreBinary(hstore: Map[String, Option[String]], charset: Charset) = {
    val buf = Unpooled.buffer()
    buf.writeInt(hstore.size)
    hstore foreach {
      case (key, value) =>
        val keyBytes = key.getBytes(charset)
        buf.writeInt(keyBytes.length)
        buf.writeBytes(keyBytes)
        value match {
          case None => buf.writeInt(-1)
          case Some(v) =>
            val valueBytes = v.getBytes(charset)
            buf.writeInt(valueBytes.length)
            buf.writeBytes(valueBytes)
        }
    }
    buf
  }
}
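The text helpers are near-inverses: keys and values are rendered quoted with embedded quotes escaped, and NULL marks an absent value. A minimal sketch with hypothetical entries; note that formatHStoreString joins entries with "," while the parser's separator is ", ", so multi-entry output of format appears not to be re-parseable verbatim:

val parsed = HStores.parseHStoreString(""""a"=>"1", "b"=>NULL""")
// parsed == Some(Map("a" -> Some("1"), "b" -> None))

val rendered = HStores.formatHStoreString(Map("a" -> Some("1"), "b" -> None))
// rendered == """"a" => "1","b" => NULL"""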
Example 33
Source File: Utils.scala From finagle-postgres with Apache License 2.0 | 5 votes |
package com.twitter.finagle.postgres.values

import java.nio.charset.Charset
import java.security.MessageDigest

import io.netty.buffer.ByteBuf

import scala.annotation.tailrec

object Charsets {
  val Utf8 = Charset.forName("UTF-8")
}

object Buffers {
  @throws(classOf[IllegalArgumentException])
  def encrypt(user: Array[Byte], password: Array[Byte], salt: Array[Byte]): Array[Byte] = {
    require(user != null && user.length > 0, "user should not be empty")
    require(password != null && password.length > 0, "password should not be empty")
    require(salt != null && salt.length > 0, "salt should not be empty")

    val inner = MessageDigest.getInstance("MD5")
    inner.update(password)
    inner.update(user)

    val outer = MessageDigest.getInstance("MD5")
    outer.update(Hex.valueOf(inner.digest).getBytes)
    outer.update(salt)

    ("md5" + Hex.valueOf(outer.digest)).getBytes
  }
}

object Hex {
  def valueOf(buf: Array[Byte]): String = buf.map("%02X" format _).mkString.toLowerCase
}

object Convert {
  def asShort(i: Int) = i.asInstanceOf[Short]
}

object Strings {
  val empty = new String
}
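Buffers.encrypt implements PostgreSQL's legacy MD5 password scheme: "md5" followed by hex(md5(hex(md5(password ++ user)) ++ salt)). A minimal sketch with hypothetical credentials and salt:

val digest: Array[Byte] = Buffers.encrypt(
  "alice".getBytes(Charsets.Utf8),
  "secret".getBytes(Charsets.Utf8),
  Array[Byte](1, 2, 3, 4) // 4-byte salt sent by the server during authentication
)
val asString = new String(digest, Charsets.Utf8) // "md5" followed by 32 hex characters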
Example 34
Source File: PostgresClient.scala From finagle-postgres with Apache License 2.0 | 5 votes |
package com.twitter.finagle.postgres

import java.nio.charset.Charset

import com.twitter.concurrent.AsyncStream
import com.twitter.finagle.Status
import com.twitter.finagle.postgres.messages.SelectResult
import com.twitter.finagle.postgres.values.Types
import com.twitter.util.Future

trait PostgresClient {

  def charset: Charset

  def isAvailable: Boolean
}

object PostgresClient {
  case class TypeSpecifier(receiveFunction: String, typeName: String, elemOid: Long = 0)

  val defaultTypes = Map(
    Types.BOOL -> TypeSpecifier("boolrecv", "bool"),
    Types.BYTE_A -> TypeSpecifier("bytearecv", "bytea"),
    Types.CHAR -> TypeSpecifier("charrecv", "char"),
    Types.NAME -> TypeSpecifier("namerecv", "name"),
    Types.INT_8 -> TypeSpecifier("int8recv", "int8"),
    Types.INT_2 -> TypeSpecifier("int2recv", "int2"),
    Types.INT_4 -> TypeSpecifier("int4recv", "int4"),
    Types.REG_PROC -> TypeSpecifier("regprocrecv", "regproc"),
    Types.TEXT -> TypeSpecifier("textrecv", "text"),
    Types.OID -> TypeSpecifier("oidrecv", "oid"),
    Types.TID -> TypeSpecifier("tidrecv", "tid"),
    Types.XID -> TypeSpecifier("xidrecv", "xid"),
    Types.CID -> TypeSpecifier("cidrecv", "cid"),
    Types.JSON -> TypeSpecifier("json_recv", "json"),
    Types.XML -> TypeSpecifier("xml_recv", "xml"),
    Types.POINT -> TypeSpecifier("point_recv", "point"),
    Types.L_SEG -> TypeSpecifier("lseg_recv", "lseg"),
    Types.PATH -> TypeSpecifier("path_recv", "path"),
    Types.BOX -> TypeSpecifier("box_recv", "box"),
    Types.POLYGON -> TypeSpecifier("poly_recv", "poly"),
    Types.LINE -> TypeSpecifier("line_recv", "line"),
    Types.CIDR -> TypeSpecifier("cidr_recv", "cidr"),
    Types.FLOAT_4 -> TypeSpecifier("float4recv", "float4"),
    Types.FLOAT_8 -> TypeSpecifier("float8recv", "float8"),
    Types.ABS_TIME -> TypeSpecifier("abstimerecv", "abstime"),
    Types.REL_TIME -> TypeSpecifier("reltimerecv", "reltime"),
    Types.T_INTERVAL -> TypeSpecifier("tinternalrecv", "tinternal"),
    Types.UNKNOWN -> TypeSpecifier("unknownrecv", "unknown"),
    Types.CIRCLE -> TypeSpecifier("circle_recv", "circle"),
    Types.MONEY -> TypeSpecifier("cash_recv", "cash"),
    Types.MAC_ADDR -> TypeSpecifier("macaddr_recv", "macaddr"),
    Types.INET -> TypeSpecifier("inet_recv", "inet"),
    Types.BP_CHAR -> TypeSpecifier("bpcharrecv", "bpchar"),
    Types.VAR_CHAR -> TypeSpecifier("varcharrecv", "varchar"),
    Types.DATE -> TypeSpecifier("date_recv", "date"),
    Types.TIME -> TypeSpecifier("time_recv", "time"),
    Types.TIMESTAMP -> TypeSpecifier("timestamp_recv", "timestamp"),
    Types.TIMESTAMP_TZ -> TypeSpecifier("timestamptz_recv", "timestamptz"),
    Types.INTERVAL -> TypeSpecifier("interval_recv", "interval"),
    Types.TIME_TZ -> TypeSpecifier("timetz_recv", "timetz"),
    Types.BIT -> TypeSpecifier("bit_recv", "bit"),
    Types.VAR_BIT -> TypeSpecifier("varbit_recv", "varbit"),
    Types.NUMERIC -> TypeSpecifier("numeric_recv", "numeric"),
    Types.RECORD -> TypeSpecifier("record_recv", "record"),
    Types.VOID -> TypeSpecifier("void_recv", "void"),
    Types.UUID -> TypeSpecifier("uuid_recv", "uuid")
  )
}
Example 35
Source File: AmqpJsonPathCheckMaterializer.scala From gatling-amqp-plugin with Apache License 2.0 | 5 votes |
package ru.tinkoff.gatling.amqp.checks

import java.io.ByteArrayInputStream
import java.nio.charset.Charset

import io.gatling.core.check.jsonpath.JsonPathCheckType
import io.gatling.core.check.{CheckMaterializer, Preparer}
import io.gatling.core.json.JsonParsers
import ru.tinkoff.gatling.amqp.AmqpCheck
import ru.tinkoff.gatling.amqp.request.AmqpProtocolMessage

import scala.util.Try

class AmqpJsonPathCheckMaterializer(jsonParsers: JsonParsers)
    extends CheckMaterializer[JsonPathCheckType, AmqpCheck, AmqpProtocolMessage, Any](identity) {

  override protected def preparer: Preparer[AmqpProtocolMessage, Any] =
    AmqpJsonPathCheckMaterializer.jsonPathPreparer(jsonParsers)
}

object AmqpJsonPathCheckMaterializer {
  private val CharsParsingThreshold = 200 * 1000

  private def jsonPathPreparer(jsonParsers: JsonParsers): Preparer[AmqpProtocolMessage, Any] =
    replyMessage => {
      // Fall back to the platform default charset when the AMQP content-encoding
      // property is missing or names an unknown charset.
      val bodyCharset = Try(Charset.forName(replyMessage.amqpProperties.getContentEncoding))
        .getOrElse(Charset.defaultCharset())

      // Parse large payloads from a stream instead of materializing one big String.
      if (replyMessage.payload.length > CharsParsingThreshold)
        jsonParsers.safeParse(new ByteArrayInputStream(replyMessage.payload), bodyCharset)
      else
        jsonParsers.safeParse(new String(replyMessage.payload, bodyCharset))
    }
}
Example 36
Source File: AmqpCheckSupport.scala From gatling-amqp-plugin with Apache License 2.0 | 5 votes |
package ru.tinkoff.gatling.amqp.checks

import java.nio.charset.Charset
import java.util.{Map => JMap}

import io.gatling.commons.validation._
import io.gatling.core.Predef.Session
import io.gatling.core.check._
import io.gatling.core.check.bytes.BodyBytesCheckType
import io.gatling.core.check.string.BodyStringCheckType
import io.gatling.core.check.xpath.XmlParsers
import io.gatling.core.json.JsonParsers
import io.gatling.core.session.Expression
import ru.tinkoff.gatling.amqp.AmqpCheck
import ru.tinkoff.gatling.amqp.checks.AmqpResponseCodeCheckBuilder.{AmqpMessageCheckType, ExtendedDefaultFindCheckBuilder, _}
import ru.tinkoff.gatling.amqp.request.AmqpProtocolMessage

import scala.annotation.implicitNotFound
import scala.util.Try

trait AmqpCheckSupport {
  def messageCheck: AmqpMessageCheck.type = AmqpMessageCheck

  val responseCode: ExtendedDefaultFindCheckBuilder[AmqpMessageCheckType, AmqpProtocolMessage, String] = ResponseCode

  @implicitNotFound("Could not find a CheckMaterializer. This check might not be valid for AMQP.")
  implicit def checkBuilder2AmqpCheck[A, P, X](checkBuilder: CheckBuilder[A, P, X])(
      implicit materializer: CheckMaterializer[A, AmqpCheck, AmqpProtocolMessage, P]): AmqpCheck =
    checkBuilder.build(materializer)

  @implicitNotFound("Could not find a CheckMaterializer. This check might not be valid for AMQP.")
  implicit def validatorCheckBuilder2AmqpCheck[A, P, X](validatorCheckBuilder: ValidatorCheckBuilder[A, P, X])(
      implicit materializer: CheckMaterializer[A, AmqpCheck, AmqpProtocolMessage, P]): AmqpCheck =
    validatorCheckBuilder.exists

  @implicitNotFound("Could not find a CheckMaterializer. This check might not be valid for AMQP.")
  implicit def findCheckBuilder2AmqpCheck[A, P, X](findCheckBuilder: FindCheckBuilder[A, P, X])(
      implicit materializer: CheckMaterializer[A, AmqpCheck, AmqpProtocolMessage, P]): AmqpCheck =
    findCheckBuilder.find.exists

  implicit def amqpXPathMaterializer(implicit xmlParsers: XmlParsers): AmqpXPathCheckMaterializer =
    new AmqpXPathCheckMaterializer(xmlParsers)

  implicit def amqpJsonPathMaterializer(implicit jsonParsers: JsonParsers): AmqpJsonPathCheckMaterializer =
    new AmqpJsonPathCheckMaterializer(jsonParsers)

  implicit def amqpBodyStringMaterializer: AmqpCheckMaterializer[BodyStringCheckType, String] =
    new CheckMaterializer[BodyStringCheckType, AmqpCheck, AmqpProtocolMessage, String](identity) {
      override protected def preparer: Preparer[AmqpProtocolMessage, String] =
        replyMessage => {
          val bodyCharset = Try(Charset.forName(replyMessage.amqpProperties.getContentEncoding))
            .getOrElse(Charset.defaultCharset())

          if (replyMessage.payload.length > 0) {
            new String(replyMessage.payload, bodyCharset).success
          } else "".success
        }
    }

  implicit def amqpBodyByteMaterializer: AmqpCheckMaterializer[BodyBytesCheckType, Array[Byte]] =
    new CheckMaterializer[BodyBytesCheckType, AmqpCheck, AmqpProtocolMessage, Array[Byte]](identity) {
      override protected def preparer: Preparer[AmqpProtocolMessage, Array[Byte]] =
        replyMessage => {
          if (replyMessage.payload.length > 0) {
            replyMessage.payload.success
          } else Array.emptyByteArray.success
        }
    }

  implicit val amqpStatusCheckMaterializer: AmqpCheckMaterializer[AmqpMessageCheckType, AmqpProtocolMessage] =
    new AmqpCheckMaterializer[AmqpMessageCheckType, AmqpProtocolMessage](identity) {
      override val preparer: Preparer[AmqpProtocolMessage, AmqpProtocolMessage] = _.success
    }

  implicit val amqpUntypedConditionalCheckWrapper: UntypedConditionalCheckWrapper[AmqpCheck] =
    (condition: Expression[Boolean], thenCheck: AmqpCheck) =>
      new Check[AmqpProtocolMessage] {
        private val typedCondition = (_: AmqpProtocolMessage, ses: Session) => condition(ses)

        override def check(response: AmqpProtocolMessage, session: Session, preparedCache: JMap[Any, Any]): Validation[CheckResult] =
          ConditionalCheck(typedCondition, thenCheck).check(response, session, preparedCache)
      }

  implicit val amqpTypedConditionalCheckWrapper: TypedConditionalCheckWrapper[AmqpProtocolMessage, AmqpCheck] =
    (condition: (AmqpProtocolMessage, Session) => Validation[Boolean], thenCheck: AmqpCheck) =>
      (response: AmqpProtocolMessage, session: Session, preparedCache: JMap[Any, Any]) =>
        ConditionalCheck(condition, thenCheck).check(response, session, preparedCache)
}
Example 37
Source File: JettyRestClient.scala From udash-core with Apache License 2.0 | 5 votes |
package io.udash
package rest.jetty

import java.net.HttpCookie
import java.nio.charset.Charset

import com.avsystem.commons._
import com.avsystem.commons.annotation.explicitGenerics
import io.udash.rest.raw._
import io.udash.utils.URLEncoder
import org.eclipse.jetty.client.HttpClient
import org.eclipse.jetty.client.api.Result
import org.eclipse.jetty.client.util.{BufferingResponseListener, BytesContentProvider, StringContentProvider}
import org.eclipse.jetty.http.{HttpHeader, MimeTypes}

import scala.util.{Failure, Success}
import scala.concurrent.duration._

object JettyRestClient {
  final val DefaultMaxResponseLength = 2 * 1024 * 1024
  final val DefaultTimeout = 10.seconds

  @explicitGenerics
  def apply[RestApi: RawRest.AsRealRpc : RestMetadata](
    client: HttpClient,
    baseUri: String,
    maxResponseLength: Int = DefaultMaxResponseLength,
    timeout: Duration = DefaultTimeout
  ): RestApi =
    RawRest.fromHandleRequest[RestApi](asHandleRequest(client, baseUri, maxResponseLength, timeout))

  def asHandleRequest(
    client: HttpClient,
    baseUrl: String,
    maxResponseLength: Int = DefaultMaxResponseLength,
    timeout: Duration = DefaultTimeout
  ): RawRest.HandleRequest =
    RawRest.safeHandle { request => callback =>
      val path = baseUrl + PlainValue.encodePath(request.parameters.path)
      val httpReq = client.newRequest(baseUrl).method(request.method.name)

      httpReq.path(path)
      request.parameters.query.entries.foreach {
        case (name, PlainValue(value)) => httpReq.param(name, value)
      }
      request.parameters.headers.entries.foreach {
        case (name, PlainValue(value)) => httpReq.header(name, value)
      }
      request.parameters.cookies.entries.foreach {
        case (name, PlainValue(value)) => httpReq.cookie(new HttpCookie(
          URLEncoder.encode(name, spaceAsPlus = true),
          URLEncoder.encode(value, spaceAsPlus = true)))
      }

      request.body match {
        case HttpBody.Empty =>
        case tb: HttpBody.Textual =>
          httpReq.content(new StringContentProvider(tb.contentType, tb.content, Charset.forName(tb.charset)))
        case bb: HttpBody.Binary =>
          httpReq.content(new BytesContentProvider(bb.contentType, bb.bytes))
      }

      timeout match {
        case fd: FiniteDuration => httpReq.timeout(fd.length, fd.unit)
        case _ =>
      }

      httpReq.send(new BufferingResponseListener(maxResponseLength) {
        override def onComplete(result: Result): Unit =
          if (result.isSucceeded) {
            val httpResp = result.getResponse
            val contentTypeOpt = httpResp.getHeaders.get(HttpHeader.CONTENT_TYPE).opt
            val charsetOpt = contentTypeOpt.map(MimeTypes.getCharsetFromContentType)
            val body = (contentTypeOpt, charsetOpt) match {
              case (Opt(contentType), Opt(charset)) =>
                HttpBody.textual(getContentAsString, MimeTypes.getContentTypeWithoutCharset(contentType), charset)
              case (Opt(contentType), Opt.Empty) =>
                HttpBody.binary(getContent, contentType)
              case _ =>
                HttpBody.Empty
            }
            val headers = httpResp.getHeaders.iterator.asScala.map(h => (h.getName, PlainValue(h.getValue))).toList
            val response = RestResponse(httpResp.getStatus, IMapping(headers), body)
            callback(Success(response))
          } else {
            callback(Failure(result.getFailure))
          }
      })
    }
}
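A minimal wiring sketch; MyApi is a hypothetical REST trait for which udash-rest's RawRest.AsRealRpc and RestMetadata instances are assumed to be derived:

val httpClient = new HttpClient()
httpClient.start()

val api: MyApi = JettyRestClient[MyApi](httpClient, "https://example.com/api") // hypothetical URL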
Example 38
Source File: ResponseHelper.scala From ledger-manager-chrome with MIT License | 5 votes |
package co.ledger.wallet.core.net

import java.io.{ByteArrayOutputStream, StringWriter}
import java.nio.charset.Charset

import org.json.{JSONArray, JSONObject}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.scalajs.js
import scala.util.{Failure, Success}

object ResponseHelper {

  implicit class ResponseFuture(f: Future[HttpClient#Response]) {

    def json: Future[(JSONObject, HttpClient#Response)] = {
      f.string.map {
        case (body, response) => (new JSONObject(body), response)
      }
    }

    def jsonArray: Future[(JSONArray, HttpClient#Response)] = {
      f.string.map {
        case (body, response) => (new JSONArray(body), response)
      }
    }

    def string: Future[(String, HttpClient#Response)] = {
      f.bytes.map {
        case (body, response) =>
          val writer = new StringWriter(body.length)
          body foreach { (char) =>
            writer.append(char.toChar)
          }
          (writer.toString, response)
      }
    }

    def bytes: Future[(Array[Byte], HttpClient#Response)] = {
      f.map { response =>
        val input = response.body
        val output = new ByteArrayOutputStream()
        val buffer = new Array[Byte](4096)
        var read = 0
        while ({ read = input.read(buffer); read } > 0) {
          output.write(buffer, 0, read)
        }
        val result = output.toByteArray
        input.close()
        output.close()
        (result, response)
      }
    }

    def noResponseBody: Future[HttpClient#Response] = {
      f.andThen {
        case Success(response) =>
          response.body.close()
          response
        case Failure(cause) =>
          throw cause
      }
    }
  }
}
Example 39
Source File: DefaultRowReader.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.binary

import java.io.{ByteArrayInputStream, DataInputStream}
import java.nio.charset.Charset

import ml.combust.mleap.runtime.serialization.{BuiltinFormats, RowReader}
import ml.combust.mleap.core.types.StructType
import ml.combust.mleap.runtime.frame.{ArrayRow, Row}
import resource._

import scala.util.Try

class DefaultRowReader(override val schema: StructType) extends RowReader {
  private val serializers = schema.fields.map(_.dataType).map(ValueSerializer.serializerForDataType)

  override def fromBytes(bytes: Array[Byte], charset: Charset = BuiltinFormats.charset): Try[Row] = {
    (for(in <- managed(new ByteArrayInputStream(bytes))) yield {
      val din = new DataInputStream(in)
      val row = ArrayRow(new Array[Any](schema.fields.length))
      var i = 0
      for(s <- serializers) {
        row.set(i, s.read(din))
        i = i + 1
      }
      row
    }).tried
  }
}
Example 40
Source File: DefaultFrameWriter.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.binary

import java.io.{ByteArrayOutputStream, DataOutputStream}
import java.nio.charset.Charset

import ml.combust.mleap.json.JsonSupport._
import ml.combust.mleap.runtime.frame.LeapFrame
import ml.combust.mleap.runtime.serialization.{BuiltinFormats, FrameWriter}
import spray.json._
import resource._

import scala.util.Try

class DefaultFrameWriter[LF <: LeapFrame[LF]](frame: LF) extends FrameWriter {

  override def toBytes(charset: Charset = BuiltinFormats.charset): Try[Array[Byte]] = {
    (for(out <- managed(new ByteArrayOutputStream())) yield {
      val serializers = frame.schema.fields.map(_.dataType).map(ValueSerializer.serializerForDataType)
      val dout = new DataOutputStream(out)

      val schemaBytes = frame.schema.toJson.prettyPrint.getBytes(BuiltinFormats.charset)
      val rows = frame.collect()

      dout.writeInt(schemaBytes.length)
      dout.write(schemaBytes)
      dout.writeInt(rows.size)

      for(row <- rows) {
        var i = 0
        for(s <- serializers) {
          s.write(row.getRaw(i), dout)
          i = i + 1
        }
      }

      dout.flush()
      out.toByteArray
    }).tried
  }
}
Example 41
Source File: DefaultRowWriter.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.binary

import java.io.{ByteArrayOutputStream, DataOutputStream}
import java.nio.charset.Charset

import ml.combust.mleap.runtime.serialization.{BuiltinFormats, RowWriter}
import ml.combust.mleap.core.types.StructType
import ml.combust.mleap.runtime.frame.Row
import resource._

import scala.util.Try

class DefaultRowWriter(override val schema: StructType) extends RowWriter {
  private val serializers = schema.fields.map(_.dataType).map(ValueSerializer.serializerForDataType)

  override def toBytes(row: Row, charset: Charset = BuiltinFormats.charset): Try[Array[Byte]] = {
    (for(out <- managed(new ByteArrayOutputStream())) yield {
      val dout = new DataOutputStream(out)
      var i = 0
      for(s <- serializers) {
        s.write(row.getRaw(i), dout)
        i = i + 1
      }
      dout.flush()
      out.toByteArray
    }).tried
  }
}
Example 42
Source File: DefaultFrameReader.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.binary

import java.io.{ByteArrayInputStream, DataInputStream}
import java.nio.charset.Charset

import ml.combust.mleap.runtime.serialization.{BuiltinFormats, FrameReader}
import ml.combust.mleap.core.types.StructType
import ml.combust.mleap.json.JsonSupport._
import ml.combust.mleap.runtime.frame.{ArrayRow, DefaultLeapFrame, Row}
import spray.json._
import resource._

import scala.collection.mutable
import scala.util.Try

class DefaultFrameReader extends FrameReader {

  override def fromBytes(bytes: Array[Byte], charset: Charset = BuiltinFormats.charset): Try[DefaultLeapFrame] = {
    (for(in <- managed(new ByteArrayInputStream(bytes))) yield {
      val din = new DataInputStream(in)

      val length = din.readInt()
      val schemaBytes = new Array[Byte](length)
      din.readFully(schemaBytes)
      val schema = new String(schemaBytes, BuiltinFormats.charset).parseJson.convertTo[StructType]
      val serializers = schema.fields.map(_.dataType).map(ValueSerializer.serializerForDataType)

      val rowCount = din.readInt()
      val rows = mutable.WrappedArray.make[Row](new Array[Row](rowCount))

      for(i <- 0 until rowCount) {
        val row = new ArrayRow(new Array[Any](schema.fields.length))
        var j = 0
        for(s <- serializers) {
          row.set(j, s.read(din))
          j = j + 1
        }
        rows(i) = row
      }

      DefaultLeapFrame(schema, rows)
    }).tried
  }
}
Example 43
Source File: FrameReader.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.runtime.serialization

import java.io._
import java.nio.charset.Charset

import ml.combust.mleap.ClassLoaderUtil
import ml.combust.mleap.runtime.frame.DefaultLeapFrame
import org.apache.commons.io.IOUtils
import resource._

import scala.util.Try

object FrameReader {
  def apply(format: String = BuiltinFormats.json,
            clOption: Option[ClassLoader] = None): FrameReader = {
    val cl = clOption.getOrElse(ClassLoaderUtil.findClassLoader(classOf[FrameReader].getCanonicalName))
    cl.loadClass(s"$format.DefaultFrameReader").
      newInstance().
      asInstanceOf[FrameReader]
  }
}

trait FrameReader {
  def fromBytes(bytes: Array[Byte], charset: Charset = BuiltinFormats.charset): Try[DefaultLeapFrame]

  def read(file: File): Try[DefaultLeapFrame] = read(file, BuiltinFormats.charset)
  def read(file: File, charset: Charset): Try[DefaultLeapFrame] = {
    (for(in <- managed(new FileInputStream(file))) yield {
      read(in, charset)
    }).tried.flatMap(identity)
  }

  def read(in: InputStream): Try[DefaultLeapFrame] = read(in, BuiltinFormats.charset)
  def read(in: InputStream, charset: Charset): Try[DefaultLeapFrame] = {
    Try(IOUtils.toByteArray(in)).flatMap(bytes => fromBytes(bytes, charset))
  }
}
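Because the concrete reader is loaded reflectively by format package name, callers depend only on this trait. A minimal sketch with a hypothetical file path (the no-argument apply defaults to the BuiltinFormats.json implementation):

val reader = FrameReader()
val frame: Try[DefaultLeapFrame] = reader.read(new File("frame.json"))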
Example 44
Source File: RowReader.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.runtime.serialization

import java.nio.charset.Charset

import ml.combust.mleap.ClassLoaderUtil
import ml.combust.mleap.core.types.StructType
import ml.combust.mleap.runtime.frame.Row

import scala.util.Try

object RowReader {
  def apply(schema: StructType,
            format: String = BuiltinFormats.json,
            clOption: Option[ClassLoader] = None): RowReader = {
    val cl = clOption.getOrElse(ClassLoaderUtil.findClassLoader(classOf[RowReader].getCanonicalName))
    cl.loadClass(s"$format.DefaultRowReader").
      getConstructor(classOf[StructType]).
      newInstance(schema).
      asInstanceOf[RowReader]
  }
}

trait RowReader {
  val schema: StructType

  def fromBytes(bytes: Array[Byte], charset: Charset = BuiltinFormats.charset): Try[Row]
}
Example 45
Source File: FrameWriter.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.runtime.serialization

import java.io.{File, FileOutputStream, OutputStream}
import java.nio.charset.Charset

import ml.combust.mleap.ClassLoaderUtil
import ml.combust.mleap.runtime.frame.LeapFrame
import resource._

import scala.reflect.ClassTag
import scala.util.Try

object FrameWriter {
  def apply[LF <: LeapFrame[LF]](frame: LF,
                                 format: String = BuiltinFormats.json,
                                 clOption: Option[ClassLoader] = None)
                                (implicit ct: ClassTag[LF]): FrameWriter = {
    val cl = clOption.getOrElse(ClassLoaderUtil.findClassLoader(classOf[FrameWriter].getCanonicalName))
    cl.loadClass(s"$format.DefaultFrameWriter").
      getConstructor(classOf[LeapFrame[_]]).
      newInstance(frame).
      asInstanceOf[FrameWriter]
  }
}

trait FrameWriter {
  def toBytes(charset: Charset = BuiltinFormats.charset): Try[Array[Byte]]

  def save(file: File): Try[Any] = save(file, BuiltinFormats.charset)
  def save(file: File, charset: Charset = BuiltinFormats.charset): Try[Any] = {
    (for(out <- managed(new FileOutputStream(file))) yield {
      save(out, charset)
    }).tried.flatMap(identity)
  }

  def save(out: OutputStream): Try[Any] = save(out, BuiltinFormats.charset)
  def save(out: OutputStream, charset: Charset): Try[Any] = {
    toBytes(charset).map(out.write)
  }
}
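The writer side mirrors the reader: resolve an implementation for the chosen format, then serialize. A minimal sketch, where frame is a hypothetical LeapFrame value and the path is illustrative:

val writer = FrameWriter(frame)      // defaults to the BuiltinFormats.json implementation
writer.save(new File("frame.json"))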
Example 46
Source File: RowWriter.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.runtime.serialization

import java.nio.charset.Charset

import ml.combust.mleap.ClassLoaderUtil
import ml.combust.mleap.core.types.StructType
import ml.combust.mleap.runtime.frame.Row

import scala.util.Try

object RowWriter {
  def apply(schema: StructType,
            format: String = BuiltinFormats.json,
            clOption: Option[ClassLoader] = None): RowWriter = {
    val cl = clOption.getOrElse(ClassLoaderUtil.findClassLoader(classOf[RowWriter].getCanonicalName))
    cl.loadClass(s"$format.DefaultRowWriter").
      getConstructor(classOf[StructType]).
      newInstance(schema).
      asInstanceOf[RowWriter]
  }
}

trait RowWriter {
  val schema: StructType

  def toBytes(row: Row, charset: Charset = BuiltinFormats.charset): Try[Array[Byte]]
}
Example 47
Source File: DefaultRowReader.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.avro

import java.nio.charset.Charset

import org.apache.avro.Schema
import org.apache.avro.generic.{GenericData, GenericDatumReader}
import org.apache.avro.io.{BinaryDecoder, DecoderFactory}
import SchemaConverter._
import ml.combust.mleap.runtime.serialization.{BuiltinFormats, RowReader}
import ml.combust.mleap.core.types.StructType
import ml.combust.mleap.runtime.frame.{ArrayRow, Row}

import scala.util.Try

class DefaultRowReader(override val schema: StructType) extends RowReader {
  val valueConverter = ValueConverter()
  lazy val readers = schema.fields.map(_.dataType).map(valueConverter.avroToMleap)
  val avroSchema = schema: Schema
  val datumReader = new GenericDatumReader[GenericData.Record](avroSchema)
  var decoder: BinaryDecoder = null
  var record = new GenericData.Record(avroSchema)

  override def fromBytes(bytes: Array[Byte], charset: Charset = BuiltinFormats.charset): Try[Row] = Try {
    decoder = DecoderFactory.get().binaryDecoder(bytes, decoder)
    record = datumReader.read(record, decoder)
    val row = ArrayRow(new Array[Any](schema.fields.length))
    for(i <- schema.fields.indices) {
      row.set(i, readers(i)(record.get(i)))
    }
    row
  }
}
Example 48
Source File: DefaultFrameWriter.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.avro

import java.io.ByteArrayOutputStream
import java.nio.charset.Charset

import org.apache.avro.Schema
import org.apache.avro.file.DataFileWriter
import org.apache.avro.generic.{GenericData, GenericDatumWriter}
import SchemaConverter._
import ml.combust.mleap.runtime.frame.LeapFrame
import ml.combust.mleap.runtime.serialization.{BuiltinFormats, FrameWriter}
import resource._

import scala.util.{Failure, Try}

class DefaultFrameWriter[LF <: LeapFrame[LF]](frame: LF) extends FrameWriter {
  val valueConverter = ValueConverter()

  override def toBytes(charset: Charset = BuiltinFormats.charset): Try[Array[Byte]] = {
    (for(out <- managed(new ByteArrayOutputStream())) yield {
      val writers = frame.schema.fields.map(_.dataType).map(valueConverter.mleapToAvro)
      val avroSchema = frame.schema: Schema
      val record = new GenericData.Record(avroSchema)
      val datumWriter = new GenericDatumWriter[GenericData.Record](avroSchema)
      val writer = new DataFileWriter[GenericData.Record](datumWriter)
      writer.create(avroSchema, out)

      for(row <- frame.collect()) {
        var i = 0
        for(writer <- writers) {
          record.put(i, writer(row.getRaw(i)))
          i = i + 1
        }

        // An append failure is printed and skipped, so one bad row does not abort the write.
        Try(writer.append(record)) match {
          case Failure(error) => error.printStackTrace()
          case _ =>
        }
      }

      writer.close()

      out.toByteArray
    }).tried
  }
}
Example 49
Source File: DefaultRowWriter.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.avro

import java.io.ByteArrayOutputStream
import java.nio.charset.Charset

import org.apache.avro.Schema
import org.apache.avro.generic.{GenericData, GenericDatumWriter}
import org.apache.avro.io.{BinaryEncoder, EncoderFactory}
import SchemaConverter._
import ml.combust.mleap.runtime.serialization.{BuiltinFormats, RowWriter}
import ml.combust.mleap.core.types.StructType
import ml.combust.mleap.runtime.frame.Row
import resource._

import scala.util.Try

class DefaultRowWriter(override val schema: StructType) extends RowWriter {
  val valueConverter = ValueConverter()
  lazy val writers = schema.fields.map(_.dataType).map(valueConverter.mleapToAvro)
  val avroSchema = schema: Schema
  val datumWriter = new GenericDatumWriter[GenericData.Record](avroSchema)
  var encoder: BinaryEncoder = null
  var record = new GenericData.Record(avroSchema)

  override def toBytes(row: Row, charset: Charset = BuiltinFormats.charset): Try[Array[Byte]] = {
    (for(out <- managed(new ByteArrayOutputStream(1024))) yield {
      encoder = EncoderFactory.get().binaryEncoder(out, encoder)

      var i = 0
      for(writer <- writers) {
        record.put(i, writer(row.getRaw(i)))
        i = i + 1
      }
      datumWriter.write(record, encoder)
      encoder.flush()

      out.toByteArray
    }).tried
  }
}
Example 50
Source File: DefaultFrameReader.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.avro

import java.nio.charset.Charset

import org.apache.avro.file.{DataFileReader, SeekableByteArrayInput}
import org.apache.avro.generic.{GenericData, GenericDatumReader}
import SchemaConverter._
import ml.combust.mleap.runtime.serialization.{BuiltinFormats, FrameReader}
import ml.combust.mleap.core.types.StructType
import ml.combust.mleap.runtime.frame.{ArrayRow, DefaultLeapFrame, Row}

import scala.collection.mutable
import scala.util.Try

class DefaultFrameReader extends FrameReader {
  val valueConverter = ValueConverter()

  override def fromBytes(bytes: Array[Byte], charset: Charset = BuiltinFormats.charset): Try[DefaultLeapFrame] = Try {
    val datumReader = new GenericDatumReader[GenericData.Record]()
    val reader = new DataFileReader[GenericData.Record](new SeekableByteArrayInput(bytes), datumReader)
    val avroSchema = reader.getSchema
    val schema = avroSchema: StructType
    val readers = schema.fields.map(_.dataType).map(valueConverter.avroToMleap)

    var record = new GenericData.Record(avroSchema)
    var rows = mutable.Seq[Row]()
    while(Try(reader.hasNext).getOrElse(false)) {
      record = reader.next(record)
      val row = ArrayRow(new Array[Any](schema.fields.length))
      for(i <- schema.fields.indices) {
        row.set(i, readers(i)(record.get(i)))
      }
      rows :+= row
    }

    DefaultLeapFrame(schema, rows)
  }
}
Example 51
Source File: JavaKSYParser.scala From kaitai_struct_compiler with GNU General Public License v3.0 | 5 votes |
package io.kaitai.struct.formats

import java.io._
import java.nio.charset.Charset
import java.util.{List => JList, Map => JMap}

import io.kaitai.struct.JavaMain.CLIConfig
import io.kaitai.struct.format.{ClassSpec, ClassSpecs}
import io.kaitai.struct.precompile.YAMLParserError
import io.kaitai.struct.{Log, Main}
import org.yaml.snakeyaml.constructor.SafeConstructor
import org.yaml.snakeyaml.error.MarkedYAMLException
import org.yaml.snakeyaml.representer.Representer
import org.yaml.snakeyaml.{DumperOptions, LoaderOptions, Yaml}

import scala.collection.JavaConversions._
import scala.concurrent.Await
import scala.concurrent.duration.Duration

object JavaKSYParser {
  def localFileToSpecs(yamlFilename: String, config: CLIConfig): ClassSpecs = {
    val firstSpec = fileNameToSpec(yamlFilename)
    val yamlDir = Option(new File(yamlFilename).getParent).getOrElse(".")
    val specs = new JavaClassSpecs(yamlDir, config.importPaths, firstSpec)

    Await.result(Main.importAndPrecompile(specs, config.runtime), Duration.Inf)
    specs
  }

  def fileNameToSpec(yamlFilename: String): ClassSpec = {
    Log.fileOps.info(() => s"reading $yamlFilename...")

    // This complex string of classes is due to the fact that Java's
    // default "FileReader" implementation always uses system locale,
    // which screws up encoding on some systems and screws up reading
    // UTF-8 files with BOM
    val fis = new FileInputStream(yamlFilename)
    val isr = new InputStreamReader(fis, Charset.forName("UTF-8"))
    val br = new BufferedReader(isr)
    try {
      val scalaSrc = readerToYaml(br)
      ClassSpec.fromYaml(scalaSrc)
    } catch {
      case marked: MarkedYAMLException =>
        val mark = marked.getProblemMark
        throw YAMLParserError(
          marked.getProblem,
          Some(yamlFilename),
          Some(mark.getLine + 1),
          Some(mark.getColumn + 1)
        )
    }
  }

  def getYamlLoader: Yaml = {
    val loaderOptions = new LoaderOptions
    loaderOptions.setAllowDuplicateKeys(false)
    new Yaml(
      new SafeConstructor,
      new Representer,
      new DumperOptions,
      loaderOptions
    )
  }

  def readerToYaml(reader: Reader): Any = {
    yamlJavaToScala(getYamlLoader.load(reader))
  }

  def stringToYaml(data: String): Any = {
    yamlJavaToScala(getYamlLoader.load(data))
  }

  def yamlJavaToScala(src: Any): Any = {
    src match {
      case jlist: JList[AnyRef] =>
        jlist.toList.map(yamlJavaToScala)
      case jmap: JMap[String, AnyRef] =>
        jmap.toMap.mapValues(yamlJavaToScala)
      case _: String =>
        src
      case _: Double =>
        src
      case _: Boolean =>
        src
      case javaInt: java.lang.Integer =>
        javaInt.intValue
      case javaLong: java.lang.Long =>
        javaLong.longValue
      case _: java.math.BigInteger =>
        src.toString
      case null =>
        // may be not the very best idea, but these nulls
        // should be handled by real parsing code, i.e. where
        // it tracks tree depth, etc.
        null
    }
  }
}
Example 52
Source File: ErrorMessagesSpec.scala From kaitai_struct_compiler with GNU General Public License v3.0 | 5 votes |
package io.kaitai.struct

import java.io._
import java.nio.charset.Charset

import io.kaitai.struct.JavaMain.CLIConfig
import io.kaitai.struct.format.{KSVersion, YAMLParseException}
import io.kaitai.struct.formats.JavaKSYParser
import io.kaitai.struct.precompile.{ErrorInInput, YAMLParserError}
import org.scalatest.FunSuite

class ErrorMessagesSpec extends FunSuite {
  // required, because this class is the sole entry point and this test needs
  // version info
  KSVersion.current = Version.version

  val FORMATS_ERR_DIR = "../tests/formats_err"
  val CHARSET_UTF8 = Charset.forName("UTF-8")
  val DEFAULT_CONFIG = CLIConfig()

  def getExpected(fn: String): String = {
    val fis = new FileInputStream(fn)
    val isr = new InputStreamReader(fis, CHARSET_UTF8)
    val br = new BufferedReader(isr)
    val firstLine = br.readLine()

    if (firstLine.startsWith("# ")) {
      firstLine.substring(2)
    } else {
      "???" //fail(s"unable to parse expected line: $firstLine")
    }
  }

  def testOne(f: File): Unit = {
    val fileName = f.getName
    val testName = fileName.substring(0, fileName.length - 4)
    val fn = f.toString
    test(testName) {
      val expected = getExpected(fn)
      val caught = intercept[RuntimeException] {
        val classSpec = JavaKSYParser.localFileToSpecs(fn, DEFAULT_CONFIG)
      }
      caught match {
        case _: YAMLParseException | _: ErrorInInput | _: YAMLParserError =>
          assertResult(expected) {
            // replace version-dependent message with a moniker
            caught.getMessage.replace(
              s"but you have ${KSVersion.current}",
              "but you have $KS_VERSION"
            )
          }
        case other =>
          System.err.println("got other exception, rethrowing")
          throw other
      }
    }
  }

  new File(FORMATS_ERR_DIR).listFiles.
    filter((f) => f.isFile && f.getName.endsWith(".ksy")).
    sorted.foreach((f) => testOne(f))
}
Example 53
Source File: EncryptorActor.scala From changestream with MIT License | 5 votes |
package changestream.actors

import java.nio.charset.Charset
import java.util.Base64

import javax.crypto.Cipher
import javax.crypto.spec.SecretKeySpec
import akka.actor.Actor
import com.typesafe.config.{Config, ConfigFactory}
import kamon.Kamon
import org.slf4j.LoggerFactory
import spray.json._

object EncryptorActor {
  case class Plaintext(message: JsObject)
  case class Ciphertext(message: JsObject)
}

class EncryptorActor (
  config: Config = ConfigFactory.load().getConfig("changestream.encryptor")
) extends Actor {
  import EncryptorActor._

  protected val log = LoggerFactory.getLogger(getClass)
  protected val timingMetric = Kamon.timer("changestream.crypto_time")

  private val charset = Charset.forName("UTF-8")
  private val decoder = Base64.getDecoder
  private val encoder = Base64.getEncoder

  private val cipher = config.getString("cipher")
  private val decodedKey = decoder.decode(config.getString("key"))
  private val originalKey = new SecretKeySpec(decodedKey, 0, decodedKey.length, cipher)
  private val encryptEngine = Cipher.getInstance(cipher)
  private val decryptEngine = Cipher.getInstance(cipher)

  private val encryptFields = config.getString("encrypt-fields").toLowerCase().split(',').map(_.trim)

  override def preStart() = {
    encryptEngine.init(Cipher.ENCRYPT_MODE, originalKey)
    decryptEngine.init(Cipher.DECRYPT_MODE, originalKey)
  }

  def receive = {
    case Plaintext(message) =>
      val timer = timingMetric.refine("mode" -> "encrypt").start()
      val result = encryptFields(message)
      timer.stop()
      sender() ! result

    case Ciphertext(message) =>
      val timer = timingMetric.refine("mode" -> "decrypt").start()
      val result = decryptFields(message)
      timer.stop()
      sender() ! result
  }

  private def encryptFields(message: JsObject, jsonPath: String = ""): JsObject = {
    JsObject(
      message.fields.map({
        case (k: String, plaintextValue: JsValue) if encryptFields.contains(getNextJsonPath(jsonPath, k)) =>
          val plaintextBytes = plaintextValue.compactPrint.getBytes(charset)
          val cipherText = encryptEngine.doFinal(plaintextBytes)
          val v = encoder.encodeToString(cipherText)
          k -> JsString(v)
        case (k: String, jsObj: JsObject) =>
          k -> encryptFields(jsObj, getNextJsonPath(jsonPath, k))
        case (k: String, v: JsValue) =>
          k -> v
      })
    )
  }

  private def decryptFields(message: JsObject, jsonPath: String = ""): JsObject = {
    JsObject(
      message.fields.map({
        case (k: String, JsString(ciphertextValue)) if encryptFields.contains(getNextJsonPath(jsonPath, k)) =>
          val ciphertextBytes = decoder.decode(ciphertextValue)
          val plaintextBytes = decryptEngine.doFinal(ciphertextBytes)
          val v = new String(plaintextBytes, charset).parseJson
          k -> v
        case (k: String, jsObj: JsObject) =>
          k -> decryptFields(jsObj, getNextJsonPath(jsonPath, k))
        case (k: String, v: JsValue) =>
          k -> v
      })
    )
  }

  private def getNextJsonPath(jsonPath: String, nextPath: String): String = {
    Seq(jsonPath, nextPath).filter(_.nonEmpty).mkString(".")
  }
}
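The actor is driven entirely by the changestream.encryptor config block: a JCE cipher name, a Base64-encoded key, and a comma-separated list of dotted JSON paths to encrypt (nested objects are matched via getNextJsonPath). A minimal HOCON sketch with hypothetical values:

changestream.encryptor {
  cipher = "AES"
  key = "<base64-encoded AES key bytes>"
  encrypt-fields = "email, profile.ssn"
}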
Example 54
Source File: RetailRecommandSerialization.scala From retail_analytics with Apache License 2.0 | 5 votes |
package models.json

import scalaz._
import scalaz.NonEmptyList._
import scalaz.Validation
import scalaz.Validation._
import Scalaz._
import net.liftweb.json._
import net.liftweb.json.scalaz.JsonScalaz._
import java.util.Date
import java.nio.charset.Charset

import controllers.SerializationBase
import models.{ RetailRecommand }

object RetailRecommandSerialization extends SerializationBase[RetailRecommand] {
  protected val ProductIdKey = "productId"
  protected val NoofOrdersKey = "nooforders"

  override implicit val writer = new JSONW[RetailRecommand] {
    override def write(h: RetailRecommand): JValue = {
      JObject(
        JField(ProductIdKey, toJSON(h.productId)) ::
          JField(NoofOrdersKey, toJSON(h.nooforders)) ::
          Nil)
    }
  }

  override implicit val reader = new JSONR[RetailRecommand] {
    override def read(json: JValue): Result[RetailRecommand] = {
      val productIdField = field[String](ProductIdKey)(json)
      val noofordersField = field[String](NoofOrdersKey)(json)

      (productIdField |@| noofordersField) {
        (productId: String, nooforders: String) =>
          new RetailRecommand(productId, nooforders)
      }
    }
  }
}
Example 55
Source File: RetailRecommandsSerialization.scala From retail_analytics with Apache License 2.0 | 5 votes |
package models.json

import scalaz._
import scalaz.NonEmptyList._
import Scalaz._
import net.liftweb.json._
import net.liftweb.json.scalaz.JsonScalaz._

import controllers.SerializationBase
import models._
import java.nio.charset.Charset

object RetailRecommandsSerialization extends SerializationBase[RetailRecommands] {
  protected val JSONClazKey = "json_claz"
  protected val ResultsKey = "plans"

  implicit override val writer = new JSONW[RetailRecommands] {
    override def write(h: RetailRecommands): JValue = {
      val nrsList: Option[List[JValue]] = h.map {
        nrOpt: RetailRecommand => nrOpt.toJValue
      }.some

      JArray(nrsList.getOrElse(List.empty[JValue]))
    }
  }

  implicit override val reader = new JSONR[RetailRecommands] {
    override def read(json: JValue): Result[RetailRecommands] = {
      json match {
        case JArray(jObjectList) => {
          val list = jObjectList.flatMap { jValue: JValue =>
            RetailRecommand.fromJValue(jValue) match {
              case Success(nr)   => List(nr)
              case Failure(fail) => List[RetailRecommand]()
            }
          }.some

          val nrs: RetailRecommands = RetailRecommands(list.getOrElse(RetailRecommands.empty))
          nrs.successNel[Error]
        }
        case j => UnexpectedJSONError(j, classOf[JArray]).failureNel[RetailRecommands]
      }
    }
  }
}
Example 56
Source File: package.scala From retail_analytics with Apache License 2.0 | 5 votes |
import scalaz._
import Scalaz._
//import scalaz.effect.IO
import scalaz.EitherT._
import scalaz.Validation
//import scalaz.Validation.FlatMap._
import scalaz.NonEmptyList._

import models.json._
import net.liftweb.json._
import net.liftweb.json.scalaz.JsonScalaz._
import java.nio.charset.Charset

package object models {

  type RetailRecommands = List[RetailRecommand]

  object RetailRecommands {
    val emptyRR = List(RetailRecommand.empty)

    def toJValue(nres: RetailRecommands): JValue = {
      import net.liftweb.json.scalaz.JsonScalaz.toJSON
      import models.json.RetailRecommandsSerialization.{ writer => RetailRecommandsWriter }
      toJSON(nres)(RetailRecommandsWriter)
    }

    def fromJValue(jValue: JValue)(implicit charset: Charset = Charset.forName("UTF-8")): Result[RetailRecommands] = {
      import net.liftweb.json.scalaz.JsonScalaz.fromJSON
      import models.json.RetailRecommandsSerialization.{ reader => RetailRecommandsReader }
      fromJSON(jValue)(RetailRecommandsReader)
    }

    def toJson(nres: RetailRecommands, prettyPrint: Boolean = false): String =
      if (prettyPrint) {
        pretty(render(toJValue(nres)))
      } else {
        compactRender(toJValue(nres))
      }

    def apply(plansList: List[RetailRecommand]): RetailRecommands = plansList

    def empty: List[RetailRecommand] = emptyRR
  }
}
Example 57
Source File: StoreOpsTest.scala From fs2-blobstore with Apache License 2.0 | 5 votes |
package blobstore

import java.nio.charset.Charset
import java.nio.file.Files
import java.util.concurrent.Executors

import cats.effect.{Blocker, IO}
import cats.effect.laws.util.TestInstances
import fs2.{Pipe, Stream}
import org.scalatest.Assertion
import org.scalatest.flatspec.AnyFlatSpec
import implicits._
import org.scalatest.matchers.must.Matchers

import scala.collection.mutable.ArrayBuffer
import scala.concurrent.ExecutionContext

class StoreOpsTest extends AnyFlatSpec with Matchers with TestInstances {

  implicit val cs = IO.contextShift(ExecutionContext.global)
  val blocker = Blocker.liftExecutionContext(ExecutionContext.fromExecutor(Executors.newCachedThreadPool))

  behavior of "PutOps"

  it should "buffer contents and compute size before calling Store.put" in {
    val bytes: Array[Byte] = "AAAAAAAAAA".getBytes(Charset.forName("utf-8"))
    val store = DummyStore(_.size must be(Some(bytes.length)))

    Stream
      .emits(bytes)
      .covary[IO]
      .through(store.bufferedPut(Path("path/to/file.txt"), blocker))
      .compile
      .drain
      .unsafeRunSync()
    store.buf.toArray must be(bytes)
  }

  it should "upload a file from a nio Path" in {
    val bytes = "hello".getBytes(Charset.forName("utf-8"))
    val store = DummyStore(_.size must be(Some(bytes.length)))

    Stream
      .bracket(IO(Files.createTempFile("test-file", ".bin"))) { p => IO(p.toFile.delete).void }
      .flatMap { p =>
        Stream.emits(bytes).covary[IO].through(fs2.io.file.writeAll(p, blocker)).drain ++
          Stream.eval(store.put(p, Path("path/to/file.txt"), blocker))
      }
      .compile
      .drain
      .unsafeRunSync()
    store.buf.toArray must be(bytes)
  }

  it should "download a file to a nio path" in {
    val bytes = "hello".getBytes(Charset.forName("utf-8"))
    val store = DummyStore(_ => succeed)
    val path = Path("path/to/file.txt")
    Stream.emits(bytes).through(store.put(path)).compile.drain.unsafeRunSync()

    Stream
      .bracket(IO(Files.createTempFile("test-file", ".bin")))(p => IO(p.toFile.delete).void)
      .flatMap { nioPath =>
        Stream.eval(store.get(path, nioPath, blocker)) >> Stream.eval {
          IO {
            Files.readAllBytes(nioPath) mustBe bytes
          }
        }
      }
      .compile
      .drain
      .unsafeRunSync()
  }
}

final case class DummyStore(check: Path => Assertion) extends Store[IO] {
  val buf = new ArrayBuffer[Byte]()

  override def put(path: Path, overwrite: Boolean): Pipe[IO, Byte, Unit] = {
    check(path)
    in => {
      buf.appendAll(in.compile.toVector.unsafeRunSync())
      Stream.emit(())
    }
  }

  override def get(path: Path, chunkSize: Int): Stream[IO, Byte] = Stream.emits(buf)
  override def list(path: Path, recursive: Boolean = false): Stream[IO, Path] = ???
  override def move(src: Path, dst: Path): IO[Unit] = ???
  override def copy(src: Path, dst: Path): IO[Unit] = ???
  override def remove(path: Path): IO[Unit] = ???
  override def putRotate(computePath: IO[Path], limit: Long): Pipe[IO, Byte, Unit] = ???
}
Example 58
Source File: FileUtils.scala From incubator-retired-gearpump with Apache License 2.0 | 5 votes |
package org.apache.gearpump.util

import java.io.{File, IOException}
import java.nio.charset.Charset

import com.google.common.io.Files

object FileUtils {
  private val UTF8 = Charset.forName("UTF-8")

  def write(file: File, str: String): Unit = {
    Files.write(str, file, UTF8)
  }

  def read(file: File): String = {
    Files.asCharSource(file, UTF8).read()
  }

  def writeByteArrayToFile(file: File, bytes: Array[Byte]): Unit = {
    Files.write(bytes, file)
  }

  def readFileToByteArray(file: File): Array[Byte] = {
    Files.toByteArray(file)
  }

  def forceMkdir(directory: File): Unit = {
    if (directory.exists() && directory.isFile) {
      throw new IOException(s"Failed to create directory ${directory.toString}, it already exist")
    }
    Files.createParentDirs(directory)
    directory.mkdir()
  }
}
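A minimal round-trip sketch (hypothetical path):

val f = new File("/tmp/example.txt")
FileUtils.write(f, "hello")
assert(FileUtils.read(f) == "hello")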
Example 59
Source File: Utilities.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.client

import java.nio.charset.Charset

import akka.util.{ByteString, Timeout}
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.content.ExecuteRequest
import org.apache.toree.utils.LogLike
import play.api.data.validation.ValidationError
import play.api.libs.json.{JsPath, Json, Reads}

import scala.concurrent.duration._

object Utilities extends LogLike {
  //
  // NOTE: This is brought in to remove feature warnings regarding the use of
  // implicit conversions regarding the following:
  //
  // 1. ByteStringToString
  // 2. ZMQMessageToKernelMessage
  //
  import scala.language.implicitConversions

  private val sessionId: UUID = java.util.UUID.randomUUID().toString

  implicit val timeout = Timeout(21474835.seconds) // Maximum delay

  implicit def ByteStringToString(byteString: ByteString): String = {
    new String(byteString.toArray, Charset.forName("UTF-8"))
  }

  implicit def StringToByteString(string: String): ByteString = {
    ByteString(string.getBytes)
  }

  implicit def ZMQMessageToKernelMessage(message: ZMQMessage): KernelMessage = {
    val delimiterIndex: Int =
      message.frames.indexOf(ByteString("<IDS|MSG>".getBytes))
    // TODO Handle the case where there is no delimiter
    val ids: Seq[Array[Byte]] =
      message.frames.take(delimiterIndex).map(
        (byteString: ByteString) => { byteString.toArray }
      )
    val header = Json.parse(message.frames(delimiterIndex + 2)).as[Header]
    val parentHeader =
      Json.parse(message.frames(delimiterIndex + 3)).validate[ParentHeader].fold[ParentHeader](
        // TODO: Investigate better solution than setting parentHeader to null for {}
        (invalid: Seq[(JsPath, Seq[ValidationError])]) => null, //HeaderBuilder.empty,
        (valid: ParentHeader) => valid
      )
    val metadata = Json.parse(message.frames(delimiterIndex + 4)).as[Metadata]

    KMBuilder().withIds(ids.toList)
      .withSignature(message.frame(delimiterIndex + 1))
      .withHeader(header)
      .withParentHeader(parentHeader)
      .withMetadata(metadata)
      .withContentString(message.frame(delimiterIndex + 5)).build(false)
  }

  implicit def KernelMessageToZMQMessage(kernelMessage: KernelMessage): ZMQMessage = {
    val frames: scala.collection.mutable.ListBuffer[ByteString] =
      scala.collection.mutable.ListBuffer()
    kernelMessage.ids.map((id: Array[Byte]) => frames += ByteString.apply(id))
    frames += "<IDS|MSG>"
    frames += kernelMessage.signature
    frames += Json.toJson(kernelMessage.header).toString()
    frames += Json.toJson(kernelMessage.parentHeader).toString()
    frames += Json.toJson(kernelMessage.metadata).toString
    frames += kernelMessage.contentString
    ZMQMessage(frames: _*)
  }

  def parseAndHandle[T](json: String, reads: Reads[T], handler: T => Unit): Unit = {
    Json.parse(json).validate[T](reads).fold(
      (invalid: Seq[(JsPath, Seq[ValidationError])]) =>
        logger.error(s"Could not parse JSON, ${json}"),
      (content: T) => handler(content)
    )
  }

  def getSessionId = sessionId

  def toKernelMessage(message: ExecuteRequest): KernelMessage = {
    // construct a kernel message whose content is an ExecuteRequest
    val id = java.util.UUID.randomUUID().toString
    val header = Header(
      id, "spark", sessionId, MessageType.Incoming.ExecuteRequest.toString, "5.0")
    KMBuilder().withIds(Seq[Array[Byte]]()).withSignature("").withHeader(header)
      .withParentHeader(HeaderBuilder.empty).withContentString(message).build
  }
}
Example 60
Source File: KernelOutputStream.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.stream

import java.io.OutputStream
import java.nio.charset.Charset

import org.apache.toree.kernel.protocol.v5.content.StreamContent
import org.apache.toree.kernel.protocol.v5.{SystemActorType, MessageType, KMBuilder}
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import org.apache.toree.utils.{LogLike, ScheduledTaskManager}

import scala.collection.mutable.ListBuffer

import KernelOutputStream._

object KernelOutputStream {
  val DefaultStreamType = "stdout"
  val DefaultSendEmptyOutput = false
}

// NOTE: the KernelOutputStream class declaration and most of its members
// (including `internalBytes` and `enableAutoFlush`) are truncated in this
// excerpt; only the write(Int) override survives.
  override def write(b: Int): Unit = internalBytes.synchronized {
    // Begin periodic flushing if this is a new set of bytes
    enableAutoFlush()

    internalBytes += b.toByte
  }
}
Example 61
Source File: KernelInputStream.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.stream

import java.io.InputStream
import java.nio.charset.Charset

import akka.pattern.ask
import org.apache.toree.kernel.protocol.v5.content.InputRequest
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import org.apache.toree.kernel.protocol.v5.kernel.Utilities.timeout
import org.apache.toree.kernel.protocol.v5.{KMBuilder, MessageType}

import scala.collection.mutable.ListBuffer
import scala.concurrent.{Await, Future}

import KernelInputStream._

object KernelInputStream {
  val DefaultPrompt = ""
  val DefaultPassword = false
}

// NOTE: the KernelInputStream class declaration (which introduces
// `internalBytes`, `kmBuilder`, `actorLoader`, `prompt`, `password` and
// `EncodingType`) is truncated in this excerpt; the methods below belong
// to its body.
  override def read(): Int = {
    if (!this.hasByte) this.requestBytes()

    this.nextByte()
  }

  private def hasByte: Boolean = internalBytes.nonEmpty

  private def nextByte(): Int = {
    val byte = internalBytes.head

    internalBytes = internalBytes.tail

    byte
  }

  private def requestBytes(): Unit = {
    val inputRequest = InputRequest(prompt, password)
    // NOTE: Assuming already provided parent header and correct ids
    val kernelMessage = kmBuilder
      .withHeader(MessageType.Outgoing.InputRequest)
      .withContentString(inputRequest)
      .build

    // NOTE: The same handler is being used in both request and reply
    val responseFuture: Future[String] =
      (actorLoader.load(MessageType.Incoming.InputReply) ? kernelMessage)
        .mapTo[String]

    // Block until we get a response
    import scala.concurrent.duration._
    internalBytes ++=
      Await.result(responseFuture, Duration.Inf).getBytes(EncodingType)
  }
}
Example 62
Source File: ZeromqKernelMessageSocket.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket

import java.nio.charset.Charset

import akka.actor.{ActorSelection, ActorSystem, ActorRef, Actor}
import akka.util.ByteString
import org.apache.toree.communication.ZMQMessage
//import org.apache.toree.kernel.protocol.v5.kernel.ZMQMessage
import org.apache.toree.kernel.protocol.v5.KernelMessage
import org.apache.toree.kernel.protocol.v5.kernel.Utilities._
import org.apache.toree.utils.MessageLogSupport

abstract class ZeromqKernelMessageSocket(
  actorSocketFunc: (ActorSystem, ActorRef) => ActorRef,
  actorForwardFunc: () => ActorSelection
) extends Actor with MessageLogSupport {
  val actorSocketRef = actorSocketFunc(context.system, self)
  val actorForwardRef = actorForwardFunc()

  override def receive: Receive = {
    case message: ZMQMessage =>
      val kernelMessage: KernelMessage = message
      logMessage(kernelMessage)

      // Grab the strings to use for signature verification
      val zmqStrings = message.frames.map((byteString: ByteString) =>
        new String(byteString.toArray, Charset.forName("UTF-8"))
      ).takeRight(4) // TODO: This assumes NO extra buffers, refactor?

      // Forward along our message (along with the strings used for
      // signatures)
      actorForwardRef ! ((zmqStrings, kernelMessage))

    case message: KernelMessage =>
      val zmqMessage: ZMQMessage = message
      logMessage(message)
      actorSocketRef ! zmqMessage
  }
}
Example 63
Source File: Utilities.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel

import java.nio.charset.Charset

import akka.util.{ByteString, Timeout}
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.utils.LogLike
import play.api.data.validation.ValidationError
import play.api.libs.json.{JsPath, Json, Reads}

import scala.concurrent.duration._

object Utilities extends LogLike {
  //
  // NOTE: This is brought in to remove feature warnings regarding the use of
  // implicit conversions regarding the following:
  //
  // 1. ByteStringToString
  // 2. ZMQMessageToKernelMessage
  //
  import scala.language.implicitConversions

  implicit val timeout = Timeout(21474835.seconds)

  implicit def ByteStringToString(byteString: ByteString): String = {
    new String(byteString.toArray, Charset.forName("UTF-8"))
  }

  implicit def StringToByteString(string: String): ByteString = {
    ByteString(string.getBytes)
  }

  implicit def ZMQMessageToKernelMessage(message: ZMQMessage): KernelMessage = {
    val delimiterIndex: Int =
      message.frames.indexOf(ByteString("<IDS|MSG>".getBytes))
    // TODO Handle the case where there is no delimiter
    val ids: Seq[Array[Byte]] =
      message.frames.take(delimiterIndex).map(
        (byteString: ByteString) => { byteString.toArray }
      )
    val header = Json.parse(message.frames(delimiterIndex + 2)).as[Header]
    // TODO: Investigate better solution than setting parentHeader to null for {}
    val parentHeader = parseAndHandle(message.frames(delimiterIndex + 3),
      ParentHeader.headerReads,
      handler = (valid: ParentHeader) => valid,
      errHandler = _ => null
    )
    val metadata = Json.parse(message.frames(delimiterIndex + 4)).as[Metadata]

    KMBuilder().withIds(ids.toList)
      .withSignature(message.frame(delimiterIndex + 1))
      .withHeader(header)
      .withParentHeader(parentHeader)
      .withMetadata(metadata)
      .withContentString(message.frame(delimiterIndex + 5)).build(false)
  }

  implicit def KernelMessageToZMQMessage(kernelMessage: KernelMessage): ZMQMessage = {
    val frames: scala.collection.mutable.ListBuffer[ByteString] =
      scala.collection.mutable.ListBuffer()
    kernelMessage.ids.map((id: Array[Byte]) => frames += ByteString.apply(id))
    frames += "<IDS|MSG>"
    frames += kernelMessage.signature
    frames += Json.toJson(kernelMessage.header).toString()
    frames += Json.toJson(kernelMessage.parentHeader).toString()
    frames += Json.toJson(kernelMessage.metadata).toString
    frames += kernelMessage.contentString
    ZMQMessage(frames: _*)
  }

  def parseAndHandle[T, U](json: String, reads: Reads[T], handler: T => U): U = {
    parseAndHandle(json, reads, handler,
      (invalid: Seq[(JsPath, Seq[ValidationError])]) => {
        logger.error(s"Could not parse JSON, ${json}")
        throw new Throwable(s"Could not parse JSON, ${json}")
      }
    )
  }

  def parseAndHandle[T, U](json: String, reads: Reads[T], handler: T => U,
    errHandler: Seq[(JsPath, Seq[ValidationError])] => U): U = {
    Json.parse(json).validate[T](reads).fold(
      errHandler,
      (content: T) => handler(content)
    )
  }
}
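The two implicit conversions above encode the Jupyter wire protocol: zero or more routing identities, the <IDS|MSG> delimiter, an HMAC signature, then header, parent header, metadata, and content frames, all UTF-8 JSON. A schematic of the frame layout the code indexes into:

// Frame layout assumed by ZMQMessageToKernelMessage (delimiterIndex = i):
//   frames(0 .. i-1) : zero or more routing identities
//   frames(i)        : "<IDS|MSG>" delimiter
//   frames(i + 1)    : HMAC signature
//   frames(i + 2)    : header        (JSON, UTF-8)
//   frames(i + 3)    : parent header (JSON, UTF-8)
//   frames(i + 4)    : metadata      (JSON, UTF-8)
//   frames(i + 5)    : content       (JSON, UTF-8)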
Example 64
Source File: ShellSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket

import java.nio.charset.Charset

import akka.actor.{ActorSelection, ActorRef, ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import akka.util.ByteString
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.kernel.{ActorLoader, Utilities}
import org.apache.toree.kernel.protocol.v5Test._
import Utilities._
import com.typesafe.config.ConfigFactory
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpecLike, Matchers}
import test.utils.MaxAkkaTestTimeout

object ShellSpec {
  val config = """
    akka {
      loglevel = "WARNING"
    }"""
}

class ShellSpec extends TestKit(
  ActorSystem(
    "ShellActorSpec",
    ConfigFactory.parseString(ShellSpec.config),
    org.apache.toree.Main.getClass.getClassLoader
  )) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {

  describe("Shell") {
    val socketFactory = mock[SocketFactory]
    val actorLoader = mock[ActorLoader]
    val socketProbe: TestProbe = TestProbe()
    when(socketFactory.Shell(any(classOf[ActorSystem]), any(classOf[ActorRef])))
      .thenReturn(socketProbe.ref)

    val relayProbe: TestProbe = TestProbe()
    val relaySelection: ActorSelection = system.actorSelection(relayProbe.ref.path)
    when(actorLoader.load(SystemActorType.KernelMessageRelay))
      .thenReturn(relaySelection)

    val shell = system.actorOf(Props(classOf[Shell], socketFactory, actorLoader))

    describe("#receive") {
      it("( KernelMessage ) should reply with a ZMQMessage via the socket") {
        // Use the implicit to convert the KernelMessage to ZMQMessage
        val MockZMQMessage: ZMQMessage = MockKernelMessage
        shell ! MockKernelMessage
        socketProbe.expectMsg(MockZMQMessage)
      }

      it("( ZMQMessage ) should forward ZMQ Strings and KernelMessage to Relay") {
        // Use the implicit to convert the KernelMessage to ZMQMessage
        val MockZMQMessage: ZMQMessage = MockKernelMessage
        shell ! MockZMQMessage

        // Should get the last four (assuming no buffer) strings in UTF-8
        val zmqStrings = MockZMQMessage.frames.map((byteString: ByteString) =>
          new String(byteString.toArray, Charset.forName("UTF-8"))
        ).takeRight(4)

        val kernelMessage: KernelMessage = MockZMQMessage

        relayProbe.expectMsg(MaxAkkaTestTimeout, (zmqStrings, kernelMessage))
      }
    }
  }
}
Example 65
Source File: StdinSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket

import java.nio.charset.Charset

import akka.actor.{Props, ActorSelection, ActorRef, ActorSystem}
import akka.testkit.{TestProbe, ImplicitSender, TestKit}
import akka.util.ByteString
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5.kernel.Utilities._
import org.apache.toree.kernel.protocol.v5Test._
import org.apache.toree.kernel.protocol.v5.{KernelMessage, SystemActorType}
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import com.typesafe.config.ConfigFactory
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, FunSpecLike}
import org.mockito.Mockito._
import org.mockito.Matchers._
import test.utils.MaxAkkaTestTimeout

object StdinSpec {
  val config = """
    akka {
      loglevel = "WARNING"
    }"""
}

class StdinSpec extends TestKit(ActorSystem(
  "StdinActorSpec",
  ConfigFactory.parseString(StdinSpec.config),
  org.apache.toree.Main.getClass.getClassLoader
)) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {

  describe("Stdin") {
    val socketFactory = mock[SocketFactory]
    val actorLoader = mock[ActorLoader]
    val socketProbe: TestProbe = TestProbe()
    when(socketFactory.Stdin(any(classOf[ActorSystem]), any(classOf[ActorRef])))
      .thenReturn(socketProbe.ref)

    val relayProbe: TestProbe = TestProbe()
    val relaySelection: ActorSelection = system.actorSelection(relayProbe.ref.path)
    when(actorLoader.load(SystemActorType.KernelMessageRelay))
      .thenReturn(relaySelection)

    val stdin = system.actorOf(Props(classOf[Stdin], socketFactory, actorLoader))

    describe("#receive") {
      it("( KernelMessage ) should reply with a ZMQMessage via the socket") {
        // Use the implicit to convert the KernelMessage to ZMQMessage
        val MockZMQMessage: ZMQMessage = MockKernelMessage
        stdin ! MockKernelMessage
        socketProbe.expectMsg(MockZMQMessage)
      }

      it("( ZMQMessage ) should forward ZMQ Strings and KernelMessage to Relay") {
        // Use the implicit to convert the KernelMessage to ZMQMessage
        val MockZMQMessage: ZMQMessage = MockKernelMessage
        stdin ! MockZMQMessage

        // Should get the last four (assuming no buffer) strings in UTF-8
        val zmqStrings = MockZMQMessage.frames.map((byteString: ByteString) =>
          new String(byteString.toArray, Charset.forName("UTF-8"))
        ).takeRight(4)

        val kernelMessage: KernelMessage = MockZMQMessage

        relayProbe.expectMsg(MaxAkkaTestTimeout, (zmqStrings, kernelMessage))
      }
    }
  }
}
Example 66
Source File: ByteUtil.scala From spark-sequoiadb with Apache License 2.0 | 5 votes |
package com.sequoiadb.spark.util

import scala.collection.mutable.ArrayBuffer
import java.nio.charset.Charset

object ByteUtil {
  def getBytes(data: Short): Array[Byte] = {
    ArrayBuffer[Byte](
      (data & 0xff).toByte,
      ((data & 0xff00) >> 8).toByte).toArray
  }

  // Note: this keeps only the low byte of the Char, so any character
  // above U+00FF is silently narrowed.
  def getBytes(data: Char): Array[Byte] = {
    ArrayBuffer[Byte]((data & 0xff).toByte).toArray
  }

  def getBytes(data: Int): Array[Byte] = {
    ArrayBuffer[Byte](
      ((data & 0xff).toByte),
      ((data & 0xff00) >> 8).toByte,
      ((data & 0xff0000) >> 16).toByte,
      ((data & 0xff000000) >> 24).toByte).toArray
  }

  def getBytes(data: Long): Array[Byte] = {
    ArrayBuffer[Byte](
      ((data & 0xff).toByte),
      ((data >> 8) & 0xff).toByte,
      ((data >> 16) & 0xff).toByte,
      ((data >> 24) & 0xff).toByte,
      ((data >> 32) & 0xff).toByte,
      ((data >> 40) & 0xff).toByte,
      ((data >> 48) & 0xff).toByte,
      ((data >> 56) & 0xff).toByte).toArray
  }

  def getBytes(data: Float): Array[Byte] = {
    getBytes(java.lang.Float.floatToIntBits(data))
  }

  def getBytes(data: Double): Array[Byte] = {
    getBytes(java.lang.Double.doubleToLongBits(data))
  }

  def getBytes(data: String, charsetName: String = "UTF-8"): Array[Byte] = {
    data.getBytes(Charset.forName(charsetName))
  }
}
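These helpers emit integers in little-endian byte order, which the following sketch makes visible; the hex formatting is only for illustration:

import com.sequoiadb.spark.util.ByteUtil

object ByteUtilSketch {
  def main(args: Array[String]): Unit = {
    // 0x01020304 comes out least-significant byte first: 04 03 02 01
    println(ByteUtil.getBytes(0x01020304).map(b => f"${b & 0xff}%02x").mkString(" "))
    // Strings go through the named charset, UTF-8 by default:
    println(ByteUtil.getBytes("é").length) // 2 bytes in UTF-8
  }
}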
Example 67
Source File: InfluxUDPClient.scala From chronicler with Apache License 2.0 | 5 votes |
package com.github.fsanaulla.chronicler.udp

import java.io.File
import java.net._
import java.nio.charset.{Charset, StandardCharsets}

import com.github.fsanaulla.chronicler.core.components.BodyBuilder
import com.github.fsanaulla.chronicler.core.model.{InfluxWriter, Point}

import scala.io.Source
import scala.util.{Failure, Try}

final class InfluxUDPClient(host: String, port: Int) extends AutoCloseable {
  private[this] val socket = new DatagramSocket()

  private[this] def buildAndSend(msg: Array[Byte]): Try[Unit] =
    Try(
      socket.send(
        new DatagramPacket(
          msg,
          msg.length,
          new InetSocketAddress(host, port)
        )
      )
    )

  def writeNative(point: String, charset: Charset = StandardCharsets.UTF_8): Try[Unit] =
    buildAndSend(point.getBytes(charset))

  def bulkWriteNative(points: Seq[String], charset: Charset = StandardCharsets.UTF_8): Try[Unit] =
    buildAndSend(points.mkString("\n").getBytes(charset))

  def write[T](
      measurement: String,
      entity: T,
      charset: Charset = StandardCharsets.UTF_8
  )(implicit writer: InfluxWriter[T]): Try[Unit] = {
    BodyBuilder.stringBodyBuilder.fromT(measurement, entity) match {
      case Left(ex) => scala.util.Failure(ex)
      case Right(r) => buildAndSend(r.getBytes(charset))
    }
  }

  def bulkWrite[T](
      measurement: String,
      entities: Seq[T],
      charset: Charset = StandardCharsets.UTF_8
  )(implicit writer: InfluxWriter[T]): Try[Unit] = {
    BodyBuilder.stringBodyBuilder.fromSeqT(measurement, entities) match {
      case Left(ex) => Failure(ex)
      case Right(r) => buildAndSend(r.getBytes(charset))
    }
  }

  def writeFromFile(file: File, charset: Charset = StandardCharsets.UTF_8): Try[Unit] = {
    val sendData = Source
      .fromFile(file)
      .getLines()
      .mkString("\n")
      .getBytes(charset)

    buildAndSend(sendData)
  }

  def writePoint(point: Point, charset: Charset = StandardCharsets.UTF_8): Try[Unit] =
    buildAndSend(point.serialize.getBytes(charset))

  def bulkWritePoints(points: Seq[Point], charset: Charset = StandardCharsets.UTF_8): Try[Unit] =
    buildAndSend(
      points
        .map(_.serialize)
        .mkString("\n")
        .getBytes(charset)
    )

  def close(): Unit = socket.close()
}
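A minimal usage sketch; the host, port, and line-protocol string are placeholders for a real InfluxDB UDP listener:

import com.github.fsanaulla.chronicler.udp.InfluxUDPClient

object UdpWriteSketch {
  def main(args: Array[String]): Unit = {
    val client = new InfluxUDPClient("localhost", 8089) // hypothetical endpoint
    // writeNative takes a raw line-protocol string; UTF-8 is the default charset
    val result = client.writeNative("cpu,host=h1 usage=0.64")
    println(result) // Success(()) once the datagram is handed to the OS
    client.close()
  }
}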
Example 68
Source File: AhcJsonHandler.scala From chronicler with Apache License 2.0 | 5 votes |
package com.github.fsanaulla.chronicler.ahc.shared.handlers

import java.nio.charset.{Charset, StandardCharsets}

import com.github.fsanaulla.chronicler.core.alias.{ErrorOr, Id}
import com.github.fsanaulla.chronicler.core.components.JsonHandler
import com.github.fsanaulla.chronicler.core.encoding.encodingFromContentType
import com.github.fsanaulla.chronicler.core.implicits._
import com.github.fsanaulla.chronicler.core.jawn.RichJParser
import org.asynchttpclient.Response
import org.typelevel.jawn.ast.{JParser, JValue}

import scala.collection.JavaConverters._

private[ahc] final class AhcJsonHandler(compress: Boolean) extends JsonHandler[Id, Response] {

  def responseBody(response: Response): ErrorOr[JValue] = {
    val bodyBts = response.getResponseBodyAsBytes
    val encoding: Charset = Option(response.getContentType)
      .flatMap(encodingFromContentType)
      .map(Charset.forName)
      .getOrElse(StandardCharsets.UTF_8)

    val bodyStr = new String(bodyBts, encoding)

    JParser.parseFromStringEither(bodyStr)
  }

  def responseHeader(response: Response): List[(String, String)] =
    response.getHeaders
      .entries()
      .asScala
      .toList
      .map(e => e.getKey -> e.getValue)

  def responseCode(response: Response): Int =
    response.getStatusCode
}
Example 69
Source File: PlainOioServer.scala From netty-in-action-scala with Apache License 2.0 | 5 votes |
package nia.chapter4

import java.io.IOException
import java.net.ServerSocket
import java.nio.charset.Charset

class PlainOioServer {
  @throws[IOException]
  def serve(port: Int): Unit = {
    // Bind the server to the given port
    val socket = new ServerSocket(port)
    try {
      while (true) {
        val clientSocket = socket.accept
        System.out.println("Accepted connection from " + clientSocket)
        // Spawn a new thread to handle the connection
        new Thread(() ⇒ {
          try {
            // Write the message to the connected client
            val out = clientSocket.getOutputStream
            out.write("Hi!\r\n".getBytes(Charset.forName("UTF-8")))
            out.flush()
            // Close the connection
            clientSocket.close()
          } catch {
            case e: IOException ⇒
              e.printStackTrace()
          } finally {
            try {
              clientSocket.close()
            } catch {
              case ex: IOException ⇒ // ignore on close
            }
          }
        }).start() // Start the thread
      }
    } catch {
      case e: IOException ⇒
        e.printStackTrace()
    }
  }
}
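A quick way to exercise the blocking server from the client side, as a sketch; the port is arbitrary and the server is assumed to be running already:

import java.net.Socket
import scala.io.Source

object PlainOioClientSketch {
  def main(args: Array[String]): Unit = {
    // Assumes a PlainOioServer is listening on 8080 in another thread/process
    val socket = new Socket("localhost", 8080)
    try {
      // The server writes "Hi!\r\n" encoded as UTF-8
      println(Source.fromInputStream(socket.getInputStream, "UTF-8").getLines().next())
    } finally socket.close()
  }
}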
Example 70
Source File: NettyOioServer.scala From netty-in-action-scala with Apache License 2.0 | 5 votes |
package nia.chapter4

import io.netty.bootstrap.ServerBootstrap
import io.netty.buffer.Unpooled
import io.netty.channel._
import io.netty.channel.oio.OioEventLoopGroup
import io.netty.channel.socket.SocketChannel
import io.netty.channel.socket.oio.OioServerSocketChannel
import java.net.InetSocketAddress
import java.nio.charset.Charset

class NettyOioServer {
  @throws[Exception]
  def server(port: Int): Unit = {
    val buf = Unpooled.unreleasableBuffer(
      Unpooled.copiedBuffer("Hi!\r\n", Charset.forName("UTF-8")))
    val group: EventLoopGroup = new OioEventLoopGroup
    try {
      // Create the ServerBootstrap
      val b = new ServerBootstrap
      b.group(group) // Use OioEventLoopGroup to allow blocking mode (old I/O)
        .channel(classOf[OioServerSocketChannel])
        .localAddress(new InetSocketAddress(port))
        // Specify a ChannelInitializer that is called for each accepted connection
        .childHandler {
          new ChannelInitializer[SocketChannel]() {
            @throws[Exception]
            override def initChannel(ch: SocketChannel): Unit = {
              ch.pipeline.addLast(new ChannelInboundHandlerAdapter() {
                @throws[Exception]
                override def channelActive(ctx: ChannelHandlerContext): Unit = {
                  ctx.writeAndFlush(buf.duplicate).addListener(
                    // Write the message to the client and add a ChannelFutureListener
                    // so the connection is closed as soon as the message is written
                    ChannelFutureListener.CLOSE)
                }
              })
            }
          }
        }
      // Bind the server to accept connections
      val f = b.bind.sync()
      f.channel.closeFuture.sync()
    } finally {
      // Release all resources
      group.shutdownGracefully.sync()
    }
  }
}
Example 71
Source File: NettyNioServer.scala From netty-in-action-scala with Apache License 2.0 | 5 votes |
package nia.chapter4

import io.netty.bootstrap.ServerBootstrap
import io.netty.buffer.Unpooled
import io.netty.channel._
import io.netty.channel.nio.NioEventLoopGroup
import io.netty.channel.socket.SocketChannel
import io.netty.channel.socket.nio.NioServerSocketChannel
import java.net.InetSocketAddress
import java.nio.charset.Charset

class NettyNioServer {
  @throws[Exception]
  def server(port: Int): Unit = {
    val buf = Unpooled.unreleasableBuffer(
      Unpooled.copiedBuffer("Hi!\r\n", Charset.forName("UTF-8")))
    // Use NioEventLoopGroup for non-blocking mode
    val group: EventLoopGroup = new NioEventLoopGroup
    try {
      // Create the ServerBootstrap
      val b = new ServerBootstrap
      b.group(group)
        .channel(classOf[NioServerSocketChannel])
        .localAddress(new InetSocketAddress(port))
        // Specify a ChannelInitializer that is called for each accepted connection
        .childHandler {
          new ChannelInitializer[SocketChannel]() {
            @throws[Exception]
            override def initChannel(ch: SocketChannel): Unit = {
              ch.pipeline.addLast(new ChannelInboundHandlerAdapter() {
                @throws[Exception]
                override def channelActive(ctx: ChannelHandlerContext): Unit = {
                  // Write the message to the client and add a ChannelFutureListener
                  // so the connection is closed as soon as the message is written
                  ctx.writeAndFlush(buf.duplicate)
                    .addListener(ChannelFutureListener.CLOSE)
                }
              })
            }
          }
        }
      // Bind the server to accept connections
      val f = b.bind.sync()
      f.channel.closeFuture.sync()
    } finally {
      // Release all resources
      group.shutdownGracefully.sync()
    }
  }
}
Example 72
Source File: StringInputParser.scala From avrohugger with Apache License 2.0 | 5 votes |
package avrohugger
package input
package parsers

import reflectivecompilation.{ PackageSplitter, Toolbox }
import stores.{ SchemaStore, TypecheckDependencyStore }

import org.apache.avro.Protocol
import org.apache.avro.Schema
import org.apache.avro.Schema.Parser
import org.apache.avro.SchemaParseException
import org.apache.avro.compiler.idl.Idl
import org.apache.avro.compiler.idl.ParseException

import scala.collection.JavaConverters._

import java.nio.charset.Charset
import java.io.FileNotFoundException

// tries schema first, then protocol, then idl, then for case class defs
class StringInputParser {

  lazy val schemaParser = new Parser()
  lazy val typecheckDependencyStore = new TypecheckDependencyStore

  def getSchemaOrProtocols(
    inputString: String,
    schemaStore: SchemaStore): List[Either[Schema, Protocol]] = {

    def trySchema(str: String): List[Either[Schema, Protocol]] = {
      try {
        List(Left(schemaParser.parse(str)))
      } catch {
        case notSchema: SchemaParseException => tryProtocol(str)
        case unknown: Throwable => sys.error("Unexpected exception: " + unknown)
      }
    }

    def tryProtocol(str: String): List[Either[Schema, Protocol]] = {
      try {
        List(Right(Protocol.parse(str)))
      } catch {
        case notProtocol: SchemaParseException => tryIDL(str)
        case unknown: Throwable => sys.error("Unexpected exception: " + unknown)
      }
    }

    def tryIDL(str: String): List[Either[Schema, Protocol]] = {
      try {
        val bytes = str.getBytes(Charset.forName("UTF-8"))
        val inStream = new java.io.ByteArrayInputStream(bytes)
        val idlParser = new Idl(inStream)
        val protocol = idlParser.CompilationUnit()
        List(Right(protocol))
      } catch {
        case e: ParseException => {
          if (e.getMessage.contains("FileNotFoundException")) {
            sys.error("Imports not supported in String IDLs, only avdl files.")
          } else tryCaseClass(str, schemaStore)
        }
        case unknown: Throwable => sys.error("Unexpected exception: " + unknown)
      }
    }

    def tryCaseClass(
      str: String,
      schemaStore: SchemaStore): List[Either[Schema, Protocol]] = {
      val compilationUnits = PackageSplitter.getCompilationUnits(str)
      val scalaDocs = ScalaDocParser.getScalaDocs(compilationUnits)
      val trees = compilationUnits.map(src => Toolbox.toolBox.parse(src))
      val treesZippedWithDocs = trees.zip(scalaDocs)
      val schemas = treesZippedWithDocs.flatMap(treeAndDocs => {
        val tree = treeAndDocs._1
        val docs = treeAndDocs._2
        TreeInputParser.parse(tree, docs, schemaStore, typecheckDependencyStore)
      })
      schemas.map(schema => Left(schema))
    }

    // tries schema first, then protocol, then idl, then for case class defs
    val schemaOrProtocols = trySchema(inputString)
    schemaOrProtocols
  }
}
Example 73
Source File: WkHtmlPdf.scala From docspell with GNU General Public License v3.0 | 5 votes |
package docspell.convert.extern

import java.nio.charset.Charset
import java.nio.file.Path

import cats.effect._
import cats.implicits._
import fs2.{Chunk, Stream}

import docspell.common._
import docspell.convert.ConversionResult.Handler
import docspell.convert.{ConversionResult, SanitizeHtml}

object WkHtmlPdf {

  def toPDF[F[_]: Sync: ContextShift, A](
      cfg: WkHtmlPdfConfig,
      chunkSize: Int,
      charset: Charset,
      sanitizeHtml: SanitizeHtml,
      blocker: Blocker,
      logger: Logger[F]
  )(in: Stream[F, Byte], handler: Handler[F, A]): F[A] = {
    val reader: (Path, SystemCommand.Result) => F[ConversionResult[F]] =
      ExternConv.readResult[F](blocker, chunkSize, logger)

    val cmdCfg = cfg.command.replace(Map("{{encoding}}" -> charset.name()))

    // html sanitize should (among other) remove links to invalid
    // protocols like cid: which is not supported by further
    // processing (wkhtmltopdf errors)
    //
    // Since jsoup will load everything anyways, a stream-based
    // conversion to java's inputstream doesn't make much sense.
    val inSane = Stream.evalUnChunk(
      Binary
        .loadAllBytes(in)
        .map(bv => sanitizeHtml(bv, charset.some))
        .map(bv => Chunk.byteVector(bv))
    )

    ExternConv
      .toPDF[F, A]("wkhtmltopdf", cmdCfg, cfg.workingDir, true, blocker, logger, reader)(
        inSane,
        handler
      )
  }
}
Example 74
Source File: Markdown.scala From docspell with GNU General Public License v3.0 | 5 votes |
package docspell.convert.flexmark

import java.io.{InputStream, InputStreamReader}
import java.nio.charset.Charset
import java.util

import scala.util.Try

import cats.effect.Sync
import cats.implicits._
import fs2.Stream

import docspell.common._

import com.vladsch.flexmark.ext.gfm.strikethrough.StrikethroughExtension
import com.vladsch.flexmark.ext.tables.TablesExtension
import com.vladsch.flexmark.html.HtmlRenderer
import com.vladsch.flexmark.parser.Parser
import com.vladsch.flexmark.util.data.{DataKey, MutableDataSet}

object Markdown {

  def toHtml(
      is: InputStream,
      cfg: MarkdownConfig,
      cs: Charset
  ): Either[Throwable, String] = {
    val p = createParser()
    val r = createRenderer()
    Try {
      val reader = new InputStreamReader(is, cs)
      val doc = p.parseReader(reader)
      wrapHtml(r.render(doc), cfg)
    }.toEither
  }

  def toHtml(md: String, cfg: MarkdownConfig): String = {
    val p = createParser()
    val r = createRenderer()
    val doc = p.parse(md)
    wrapHtml(r.render(doc), cfg)
  }

  def toHtml[F[_]: Sync](
      data: Stream[F, Byte],
      cfg: MarkdownConfig,
      cs: Charset
  ): F[String] =
    data.through(Binary.decode(cs)).compile.foldMonoid.map(str => toHtml(str, cfg))

  private def wrapHtml(body: String, cfg: MarkdownConfig): String =
    s"""<!DOCTYPE html>
       |<html>
       |<head>
       |<meta charset="utf-8"/>
       |<style>
       |${cfg.internalCss}
       |</style>
       |</head>
       |<body>
       |$body
       |</body>
       |</html>
       |""".stripMargin

  private def createParser(): Parser = {
    val opts = new MutableDataSet()
    opts.set(
      Parser.EXTENSIONS.asInstanceOf[DataKey[util.Collection[_]]],
      util.Arrays.asList(TablesExtension.create(), StrikethroughExtension.create())
    );

    Parser.builder(opts).build()
  }

  private def createRenderer(): HtmlRenderer = {
    val opts = new MutableDataSet()
    HtmlRenderer.builder(opts).build()
  }
}
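A usage sketch for the pure toHtml(md, cfg) overload; it assumes MarkdownConfig simply carries the CSS that is inlined into the wrapper (its exact definition lives elsewhere in docspell):

import docspell.convert.flexmark.{Markdown, MarkdownConfig}

object MarkdownSketch {
  def main(args: Array[String]): Unit = {
    val cfg = MarkdownConfig("body { font-family: sans-serif; }") // assumed single-field config
    val html = Markdown.toHtml("# Hello\n\nSome *markdown*.", cfg)
    println(html) // a full HTML document with the CSS inlined in <style>
  }
}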
Example 75
Source File: Binary.scala From docspell with GNU General Public License v3.0 | 5 votes |
package docspell.common

import java.nio.charset.Charset
import java.nio.charset.StandardCharsets

import cats.effect._
import fs2.{Chunk, Pipe, Stream}

import scodec.bits.ByteVector

final case class Binary[F[_]](name: String, mime: MimeType, data: Stream[F, Byte]) {

  def withMime(mime: MimeType): Binary[F] =
    copy(mime = mime)
}

object Binary {

  def apply[F[_]](name: String, data: Stream[F, Byte]): Binary[F] =
    Binary[F](name, MimeType.octetStream, data)

  def utf8[F[_]](name: String, content: String): Binary[F] =
    Binary[F](
      name,
      MimeType.octetStream,
      Stream.emit(content).through(fs2.text.utf8Encode)
    )

  def text[F[_]](name: String, content: String): Binary[F] =
    utf8(name, content).withMime(MimeType.plain.withUtf8Charset)

  def text[F[_]](name: String, content: ByteVector, cs: Charset): Binary[F] =
    Binary(name, MimeType.plain.withCharset(cs), Stream.chunk(Chunk.byteVector(content)))

  def html[F[_]](name: String, content: String): Binary[F] =
    utf8(name, content).withMime(MimeType.html.withUtf8Charset)

  def html[F[_]](name: String, content: ByteVector, cs: Charset): Binary[F] =
    Binary(name, MimeType.html.withCharset(cs), Stream.chunk(Chunk.byteVector(content)))

  def decode[F[_]](cs: Charset): Pipe[F, Byte, String] =
    if (cs == StandardCharsets.UTF_8)
      fs2.text.utf8Decode
    else
      util.decode[F](cs)

  def loadAllBytes[F[_]: Sync](data: Stream[F, Byte]): F[ByteVector] =
    data.chunks.map(_.toByteVector).compile.fold(ByteVector.empty)((r, e) => r ++ e)

  // This is a copy from org.http4s.util
  // Http4s is licensed under the Apache License 2.0
  private object util {
    import fs2._
    import java.nio._

    private val utf8Bom: Chunk[Byte] = Chunk(0xef.toByte, 0xbb.toByte, 0xbf.toByte)

    def decode[F[_]](charset: Charset): Pipe[F, Byte, String] = {
      val decoder = charset.newDecoder
      val maxCharsPerByte = math.ceil(decoder.maxCharsPerByte().toDouble).toInt
      val avgBytesPerChar = math.ceil(1.0 / decoder.averageCharsPerByte().toDouble).toInt
      val charBufferSize = 128

      _.repeatPull[String] {
        _.unconsN(charBufferSize * avgBytesPerChar, allowFewer = true).flatMap {
          case None =>
            val charBuffer = CharBuffer.allocate(1)
            decoder.decode(ByteBuffer.allocate(0), charBuffer, true)
            decoder.flush(charBuffer)
            val outputString = charBuffer.flip().toString
            if (outputString.isEmpty) Pull.done.as(None)
            else Pull.output1(outputString).as(None)
          case Some((chunk, stream)) =>
            if (chunk.nonEmpty) {
              val chunkWithoutBom = skipByteOrderMark(chunk)
              val bytes = chunkWithoutBom.toArray
              val byteBuffer = ByteBuffer.wrap(bytes)
              val charBuffer = CharBuffer.allocate(bytes.length * maxCharsPerByte)
              decoder.decode(byteBuffer, charBuffer, false)
              val nextStream = stream.consChunk(Chunk.byteBuffer(byteBuffer.slice()))
              Pull.output1(charBuffer.flip().toString).as(Some(nextStream))
            } else
              Pull.output(Chunk.empty[String]).as(Some(stream))
        }
      }
    }

    private def skipByteOrderMark[F[_]](chunk: Chunk[Byte]): Chunk[Byte] =
      if (chunk.size >= 3 && chunk.take(3) == utf8Bom)
        chunk.drop(3)
      else chunk
  }
}
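To see the charset-aware decoding pipe in action, a small sketch decoding Latin-1 bytes on the non-UTF-8 path; it assumes a cats-effect IO is available, as in the rest of docspell:

import java.nio.charset.StandardCharsets
import cats.effect.IO
import fs2.Stream
import docspell.common.Binary

object DecodeSketch {
  def run: IO[Unit] = {
    val latin1Bytes = "grüße".getBytes(StandardCharsets.ISO_8859_1)
    Stream
      .emits(latin1Bytes)
      .covary[IO]
      .through(Binary.decode(StandardCharsets.ISO_8859_1)) // takes the non-UTF-8 branch
      .compile
      .string
      .flatMap(s => IO(println(s))) // prints: grüße
  }
}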
Example 76
Source File: CapitalizedInputReaderTrait.scala From Scala-Design-Patterns-Second-Edition with MIT License | 5 votes |
package com.ivan.nikolov.structural.decorator

import java.io.{BufferedInputStream, InputStreamReader, BufferedReader, ByteArrayOutputStream}
import java.nio.charset.Charset
import java.util.Base64
import java.util.zip.GZIPOutputStream

import com.ivan.nikolov.structural.decorator.common.{AdvancedInputReader, InputReader}
import com.typesafe.scalalogging.LazyLogging

trait CapitalizedInputReaderTrait extends InputReader {
  abstract override def readLines(): Stream[String] =
    super.readLines().map(_.toUpperCase)
}

trait CompressingInputReaderTrait extends InputReader with LazyLogging {
  abstract override def readLines(): Stream[String] = super.readLines().map {
    case line =>
      val text = line.getBytes(Charset.forName("UTF-8"))
      logger.info("Length before compression: {}", text.length.toString)
      val output = new ByteArrayOutputStream()
      val compressor = new GZIPOutputStream(output)
      try {
        compressor.write(text, 0, text.length)
        val outputByteArray = output.toByteArray
        logger.info("Length after compression: {}", outputByteArray.length.toString)
        new String(outputByteArray, Charset.forName("UTF-8"))
      } finally {
        compressor.close()
        output.close()
      }
  }
}

trait Base64EncoderInputReaderTrait extends InputReader {
  abstract override def readLines(): Stream[String] = super.readLines().map {
    case line =>
      Base64.getEncoder.encodeToString(line.getBytes(Charset.forName("UTF-8")))
  }
}

object StackableTraitsExample {
  def main(args: Array[String]): Unit = {
    val stream = new BufferedReader(
      new InputStreamReader(
        new BufferedInputStream(this.getClass.getResourceAsStream("data.txt"))
      )
    )
    try {
      val reader = new AdvancedInputReader(stream) with CapitalizedInputReaderTrait
      reader.readLines().foreach(println)
    } finally {
      stream.close()
    }
  }
}

object StackableTraitsBigExample {
  def main(args: Array[String]): Unit = {
    val stream = new BufferedReader(
      new InputStreamReader(
        new BufferedInputStream(this.getClass.getResourceAsStream("data.txt"))
      )
    )
    try {
      val reader = new AdvancedInputReader(stream)
        with CapitalizedInputReaderTrait
        with Base64EncoderInputReaderTrait
        with CompressingInputReaderTrait
      reader.readLines().foreach(println)
    } finally {
      stream.close()
    }
  }
}
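One subtlety in the stackable-trait version: mixin order matters. Scala's linearization calls the last-mixed trait's readLines first, and each super call walks left through the mixin list, so the transformations are applied to the data in left-to-right mixin order (here: uppercase, then Base64, then compression). A self-contained sketch of that ordering, with simplified stand-in traits:

trait Reader { def readLines(): Stream[String] = Stream("a", "b") }

trait Upper extends Reader {
  abstract override def readLines(): Stream[String] = super.readLines().map(_.toUpperCase)
}

trait Tag extends Reader {
  abstract override def readLines(): Stream[String] = super.readLines().map("<" + _ + ">")
}

object LinearizationSketch {
  def main(args: Array[String]): Unit = {
    val r = new Reader with Upper with Tag {}
    // Upper runs on the base output first, then Tag wraps it: <A>, <B>
    r.readLines().foreach(println)
  }
}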
Example 77
Source File: InputReaderDecorator.scala From Scala-Design-Patterns-Second-Edition with MIT License | 5 votes |
package com.ivan.nikolov.structural.decorator

import java.io.{InputStreamReader, BufferedInputStream, ByteArrayOutputStream, BufferedReader}
import java.nio.charset.Charset
import java.util.Base64
import java.util.zip.GZIPOutputStream

import com.ivan.nikolov.structural.decorator.common.{AdvancedInputReader, InputReader}
import com.typesafe.scalalogging.LazyLogging

abstract class InputReaderDecorator(inputReader: InputReader) extends InputReader {
  override def readLines(): Stream[String] = inputReader.readLines()
}

class CapitalizedInputReader(inputReader: InputReader) extends InputReaderDecorator(inputReader) {
  override def readLines(): Stream[String] = super.readLines().map(_.toUpperCase)
}

class CompressingInputReader(inputReader: InputReader)
  extends InputReaderDecorator(inputReader) with LazyLogging {
  override def readLines(): Stream[String] = super.readLines().map {
    case line =>
      val text = line.getBytes(Charset.forName("UTF-8"))
      logger.info("Length before compression: {}", text.length.toString)
      val output = new ByteArrayOutputStream()
      val compressor = new GZIPOutputStream(output)
      try {
        compressor.write(text, 0, text.length)
        val outputByteArray = output.toByteArray
        logger.info("Length after compression: {}", outputByteArray.length.toString)
        new String(outputByteArray, Charset.forName("UTF-8"))
      } finally {
        compressor.close()
        output.close()
      }
  }
}

class Base64EncoderInputReader(inputReader: InputReader) extends InputReaderDecorator(inputReader) {
  override def readLines(): Stream[String] = super.readLines().map {
    case line =>
      Base64.getEncoder.encodeToString(line.getBytes(Charset.forName("UTF-8")))
  }
}

object DecoratorExample {
  def main(args: Array[String]): Unit = {
    val stream = new BufferedReader(
      new InputStreamReader(
        new BufferedInputStream(this.getClass.getResourceAsStream("data.txt"))
      )
    )
    try {
      val reader = new CapitalizedInputReader(new AdvancedInputReader(stream))
      reader.readLines().foreach(println)
    } finally {
      stream.close()
    }
  }
}

object DecoratorExampleBig {
  def main(args: Array[String]): Unit = {
    val stream = new BufferedReader(
      new InputStreamReader(
        new BufferedInputStream(this.getClass.getResourceAsStream("data.txt"))
      )
    )
    try {
      val reader = new CompressingInputReader(
        new Base64EncoderInputReader(
          new CapitalizedInputReader(
            new AdvancedInputReader(stream)
          )
        )
      )
      reader.readLines().foreach(println)
    } finally {
      stream.close()
    }
  }
}
Example 80
Source File: PlantUMLRenderer.scala From gitbucket-plantuml-plugin with Apache License 2.0 | 5 votes |
package com.yotaichino.gitbucket.plugins.plantuml

import gitbucket.core.plugin.Renderer
import gitbucket.core.plugin.RenderRequest
import gitbucket.core.util.StringUtil
import java.nio.charset.Charset
import java.util.Base64
import play.twirl.api.Html

class PlantUMLRenderer extends Renderer {

  override def render(request: RenderRequest): Html = {
    Html(imgEmbedded(request.fileContent))
  }

  def imgEmbedded(content: String): String = {
    val raw = PlantUMLUtils.generateSVGImage(content)
    raw match {
      case null => {
        val c = StringUtil.escapeHtml(content)
        s"""<pre class="prettyprint linenums blob">$c</pre>"""
      }
      case _ => {
        val src = Base64.getEncoder.encodeToString(raw)
        s"""<img src="data:image/svg+xml;charset=utf-8;base64,$src">"""
      }
    }
  }
}
Example 81
Source File: LimitingJavaOutputStreams.scala From incubator-daffodil with Apache License 2.0 | 5 votes |
package org.apache.daffodil.io

import java.nio.charset.Charset

class LayerBoundaryMarkInsertingJavaOutputStream(
  jos: java.io.OutputStream,
  boundaryMark: String,
  charset: Charset)
  extends java.io.FilterOutputStream(jos) {

  private var closed = false

  private val boundaryMarkBytes = boundaryMark.getBytes(charset)

  override def close(): Unit = {
    if (!closed) {
      jos.write(boundaryMarkBytes)
      jos.flush()
      jos.close()
      closed = true
    }
  }
}
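A sketch of this stream in isolation, wrapping an in-memory sink; the boundary mark is appended exactly once, on close:

import java.io.ByteArrayOutputStream
import java.nio.charset.StandardCharsets
import org.apache.daffodil.io.LayerBoundaryMarkInsertingJavaOutputStream

object BoundaryMarkSketch {
  def main(args: Array[String]): Unit = {
    val sink = new ByteArrayOutputStream()
    val out = new LayerBoundaryMarkInsertingJavaOutputStream(sink, "--END--", StandardCharsets.UTF_8)
    out.write("payload".getBytes(StandardCharsets.UTF_8))
    out.close() // writes the boundary mark, flushes and closes the underlying stream
    println(new String(sink.toByteArray, StandardCharsets.UTF_8)) // payload--END--
  }
}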
Example 82
Source File: Base64Transformer.scala From incubator-daffodil with Apache License 2.0 | 5 votes |
package org.apache.daffodil.layers

import org.apache.daffodil.schema.annotation.props.gen.LayerLengthKind
import org.apache.daffodil.schema.annotation.props.gen.LayerLengthUnits
import org.apache.daffodil.util.Maybe
import org.apache.daffodil.processors.LayerLengthInBytesEv
import org.apache.daffodil.processors.LayerBoundaryMarkEv
import org.apache.daffodil.processors.LayerCharsetEv
import org.apache.daffodil.io.BoundaryMarkLimitingStream
import org.apache.daffodil.processors.parsers.PState
import org.apache.daffodil.processors.charset.BitsCharsetJava
import java.nio.charset.Charset
import org.apache.daffodil.processors.charset.BitsCharset
import org.apache.daffodil.exceptions.Assert
import org.apache.daffodil.processors.unparsers.UState
import org.apache.daffodil.io.LayerBoundaryMarkInsertingJavaOutputStream
import org.apache.daffodil.dsom.DPathCompileInfo

class Base64MIMETransformer(layerCharsetEv: LayerCharsetEv, layerBoundaryMarkEv: LayerBoundaryMarkEv)
  extends LayerTransformer() {

  override def wrapLayerDecoder(jis: java.io.InputStream): java.io.InputStream = {
    val b64 = java.util.Base64.getMimeDecoder().wrap(jis)
    b64
  }

  override def wrapLimitingStream(jis: java.io.InputStream, state: PState) = {
    val layerCharset: BitsCharset = layerCharsetEv.evaluate(state)
    val layerBoundaryMark = layerBoundaryMarkEv.evaluate(state)
    val javaCharset: Charset = layerCharset match {
      case jbcs: BitsCharsetJava => jbcs.javaCharset
      case _ => Assert.invariantFailed("Not a java-compatible charset: " + layerCharset)
    }
    val s = BoundaryMarkLimitingStream(jis, layerBoundaryMark, javaCharset)
    s
  }

  override protected def wrapLayerEncoder(jos: java.io.OutputStream): java.io.OutputStream = {
    val b64 = java.util.Base64.getMimeEncoder().wrap(jos)
    b64
  }

  override protected def wrapLimitingStream(jos: java.io.OutputStream, state: UState): java.io.OutputStream = {
    val layerCharset: BitsCharset = layerCharsetEv.evaluate(state)
    val layerBoundaryMark = layerBoundaryMarkEv.evaluate(state)
    val javaCharset: Charset = layerCharset match {
      case jbcs: BitsCharsetJava => jbcs.javaCharset
      case _ => Assert.invariantFailed("Not a java-compatible charset: " + layerCharset)
    }
    val newJOS = new LayerBoundaryMarkInsertingJavaOutputStream(jos, layerBoundaryMark, javaCharset)
    newJOS
  }
}

object Base64MIMETransformerFactory
  extends LayerTransformerFactory("base64_MIME") {

  override def newInstance(
    maybeLayerCharsetEv: Maybe[LayerCharsetEv],
    maybeLayerLengthKind: Maybe[LayerLengthKind],
    maybeLayerLengthInBytesEv: Maybe[LayerLengthInBytesEv],
    maybeLayerLengthUnits: Maybe[LayerLengthUnits],
    maybeLayerBoundaryMarkEv: Maybe[LayerBoundaryMarkEv],
    tci: DPathCompileInfo): LayerTransformer = {

    tci.schemaDefinitionUnless(
      scala.util.Properties.isJavaAtLeast("1.8"),
      "Base64 layer support requires Java 8 (aka Java 1.8).")

    tci.schemaDefinitionUnless(
      maybeLayerBoundaryMarkEv.isDefined,
      "Property dfdlx:layerBoundaryMark was not defined.")
    tci.schemaDefinitionUnless(
      maybeLayerLengthKind.isEmpty ||
        (maybeLayerLengthKind.get eq LayerLengthKind.BoundaryMark),
      "Only dfdlx:layerLengthKind 'boundaryMark' is supported, but '%s' was specified",
      maybeLayerLengthKind.get.toString)
    tci.schemaDefinitionUnless(
      maybeLayerCharsetEv.isDefined,
      "Property dfdlx:layerEncoding must be defined.")

    val xformer = new Base64MIMETransformer(maybeLayerCharsetEv.get, maybeLayerBoundaryMarkEv.get)
    xformer
  }
}
Example 83
Source File: EventSerializer.scala From nexus with Apache License 2.0 | 5 votes |
package ch.epfl.bluebrain.nexus.iam.io

import java.nio.charset.Charset

import akka.actor.ExtendedActorSystem
import akka.serialization.SerializerWithStringManifest
import ch.epfl.bluebrain.nexus.iam.acls.AclEvent
import ch.epfl.bluebrain.nexus.iam.permissions.PermissionsEvent
import ch.epfl.bluebrain.nexus.iam.realms.RealmEvent
import ch.epfl.bluebrain.nexus.iam.types.GrantType.Camel._
import ch.epfl.bluebrain.nexus.rdf.Iri.Url
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.HttpConfig
import ch.epfl.bluebrain.nexus.service.config.Settings
import io.circe.generic.extras.Configuration
import io.circe.generic.extras.semiauto._
import io.circe.parser._
import io.circe.syntax._
import io.circe.{Decoder, Encoder, Printer}

class EventSerializer(system: ExtendedActorSystem) extends SerializerWithStringManifest {

  private val utf8 = Charset.forName("UTF-8")

  private val printer = Printer.noSpaces.copy(dropNullValues = true)

  implicit private[io] val http: HttpConfig = Settings(system).serviceConfig.http

  implicit private[io] val config: Configuration = Configuration.default.withDiscriminator("@type")

  implicit private[io] val urlEncoder: Encoder[Url] =
    Encoder.encodeString.contramap(_.asUri)
  implicit private[io] val urlDecoder: Decoder[Url] =
    Decoder.decodeString.emap(Url.apply)

  implicit private[io] val permissionEventEncoder: Encoder[PermissionsEvent] = deriveConfiguredEncoder[PermissionsEvent]
  implicit private[io] val permissionEventDecoder: Decoder[PermissionsEvent] = deriveConfiguredDecoder[PermissionsEvent]
  implicit private[io] val aclEventEncoder: Encoder[AclEvent]                = deriveConfiguredEncoder[AclEvent]
  implicit private[io] val aclEventDecoder: Decoder[AclEvent]                = deriveConfiguredDecoder[AclEvent]
  implicit private[io] val realmEventEncoder: Encoder[RealmEvent]            = deriveConfiguredEncoder[RealmEvent]
  implicit private[io] val realmEventDecoder: Decoder[RealmEvent]            = deriveConfiguredDecoder[RealmEvent]

  override val identifier: Int = 1225

  override def manifest(o: AnyRef): String =
    o match {
      case _: PermissionsEvent => "permissions-event"
      case _: AclEvent         => "acl-event"
      case _: RealmEvent       => "realm-event"
      case other               =>
        throw new IllegalArgumentException(
          s"Cannot determine manifest for unknown type: '${other.getClass.getCanonicalName}'"
        )
    }

  override def toBinary(o: AnyRef): Array[Byte] =
    o match {
      case ev: PermissionsEvent => ev.asJson.printWith(printer).getBytes(utf8)
      case ev: AclEvent         => ev.asJson.printWith(printer).getBytes(utf8)
      case ev: RealmEvent       => ev.asJson.printWith(printer).getBytes(utf8)
      case other                =>
        throw new IllegalArgumentException(s"Cannot serialize unknown type: '${other.getClass.getCanonicalName}'")
    }

  override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef =
    manifest match {
      case "permissions-event" =>
        val str = new String(bytes, utf8)
        decode[PermissionsEvent](str)
          .getOrElse(throw new IllegalArgumentException(s"Cannot deserialize value: '$str' to 'PermissionsEvent'"))
      case "acl-event" =>
        val str = new String(bytes, utf8)
        decode[AclEvent](str)
          .getOrElse(throw new IllegalArgumentException(s"Cannot deserialize value: '$str' to 'AclEvent'"))
      case "realm-event" =>
        val str = new String(bytes, utf8)
        decode[RealmEvent](str)
          .getOrElse(throw new IllegalArgumentException(s"Cannot deserialize value: '$str' to 'RealmEvent'"))
      case other =>
        throw new IllegalArgumentException(s"Cannot deserialize type with unknown manifest: '$other'")
    }
}
Example 84
Source File: ProcessBuilderUtils.scala From scalastringcourseday7 with Apache License 2.0 | 5 votes |
package util

import java.io.ByteArrayInputStream
import java.nio.charset.{Charset, CodingErrorAction}

import text.StringOption

import scala.collection.mutable.ListBuffer
import scala.io.{Codec, Source}
import scala.sys.process.ProcessBuilder

object ProcessBuilderUtils {
  implicit def processToProcessUtils(repr: ProcessBuilder): ProcessBuilderUtils = {
    new ProcessBuilderUtils(repr)
  }
}

class ProcessBuilderUtils(repr: ProcessBuilder) {
  def lineStream(
    encoding: Charset,
    onMalformedInput: CodingErrorAction,
    onUnmappableCharacter: CodingErrorAction,
    replacementOpt: StringOption): Iterator[String] = {
    val lines: Iterator[String] = repr.lineStream_!.iterator
    val byteBuffer = ListBuffer.empty[Byte]
    while (lines.hasNext) {
      val line: String = lines.next.trim concat "\n"
      byteBuffer ++= line.getBytes
    }
    implicit val codec = Codec(encoding).
      onMalformedInput(onMalformedInput).
      onUnmappableCharacter(onUnmappableCharacter)
    if (replacementOpt.nonEmpty) {
      codec.decodingReplaceWith(replacementOpt.get)
    }
    Source.fromInputStream(new ByteArrayInputStream(byteBuffer.toArray)).getLines
  }
}
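A usage sketch for the implicit extension; StringOption is this project's own optional-string type, so the constructor call below is an assumption:

import java.nio.charset.{CodingErrorAction, StandardCharsets}
import scala.sys.process._
import util.ProcessBuilderUtils._

object LineStreamSketch {
  def main(args: Array[String]): Unit = {
    // Pipe a command's output through a UTF-8 decoder that replaces
    // malformed and unmappable input instead of throwing.
    val lines = Process(Seq("echo", "héllo")).lineStream(
      StandardCharsets.UTF_8,
      CodingErrorAction.REPLACE,
      CodingErrorAction.REPLACE,
      text.StringOption(null) // assumed way to pass "no replacement string"
    )
    lines.foreach(println)
  }
}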
Example 85
Source File: HttpClient.scala From scala-influxdb-client with MIT License | 5 votes |
package com.paulgoldbaum.influxdbclient

import java.nio.charset.Charset

import org.asynchttpclient._
import org.asynchttpclient.Realm.{AuthScheme, Builder}

import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.collection.JavaConverters._

protected class HttpClient(val host: String,
                           val port: Int,
                           val https: Boolean = false,
                           val username: String = null,
                           val password: String = null,
                           val clientConfig: HttpConfig = null)
                          (implicit ec: ExecutionContext) {

  private val authenticationRealm = makeAuthenticationRealm()
  private var connectionClosed = false

  private val client: AsyncHttpClient =
    if (clientConfig == null)
      new DefaultAsyncHttpClient()
    else
      new DefaultAsyncHttpClient(clientConfig.build())

  private val protocol = if (https) "https" else "http"

  def get(url: String, params: Map[String, String] = Map()): Future[HttpResponse] = {
    val requestBuilder = client.prepareGet("%s://%s:%d%s".format(protocol, host, port, url))
      .setRealm(authenticationRealm)
    requestBuilder.setQueryParams(params.map(p => new Param(p._1, p._2)).toList.asJava)
    makeRequest(requestBuilder)
  }

  def post(url: String, params: Map[String, String] = Map(), content: String): Future[HttpResponse] = {
    val requestBuilder = client.preparePost("%s://%s:%d%s".format(protocol, host, port, url))
      .setRealm(authenticationRealm)
      .setBody(content)
      .setCharset(Charset.forName("UTF-8"))
    requestBuilder.setQueryParams(params.map(p => new Param(p._1, p._2)).toList.asJava)
    makeRequest(requestBuilder)
  }

  private def makeRequest(requestBuilder: BoundRequestBuilder): Future[HttpResponse] = {
    val resultPromise = Promise[HttpResponse]()
    if (isClosed)
      return resultPromise.failure(new HttpException("Connection is already closed")).future
    requestBuilder.execute(new ResponseHandler(resultPromise))
    resultPromise.future
  }

  def close() = {
    if (isClosed)
      throw new HttpException("Connection is already closed")
    client.close()
    connectionClosed = true
  }

  def isClosed = connectionClosed

  private def makeAuthenticationRealm(): Realm = username match {
    case null => null
    case _ => new Builder(username, password)
      .setUsePreemptiveAuth(true)
      .setScheme(AuthScheme.BASIC)
      .build()
  }

  private class ResponseHandler(promise: Promise[HttpResponse]) extends AsyncCompletionHandler[Response] {

    override def onCompleted(response: Response): Response = {
      if (response.getStatusCode >= 400)
        promise.failure(new HttpException(
          s"Server answered with error code ${response.getStatusCode}. Message: ${response.getResponseBody}",
          response.getStatusCode))
      else
        promise.success(new HttpResponse(response.getStatusCode, response.getResponseBody))
      response
    }

    override def onThrowable(throwable: Throwable) = {
      promise.failure(new HttpException("An error occurred during the request", -1, throwable))
    }
  }
}

class HttpException protected[influxdbclient] (val str: String, val code: Int = -1, val throwable: Throwable = null)
  extends Exception(str, throwable) {}

case class HttpResponse(code: Int, content: String)

case class HttpJsonResponse(code: Int, content: Map[String, Object])
Example 86
Source File: FileCredentials.scala From coursier with Apache License 2.0 | 5 votes |
package coursier.credentials

import java.io.{File, FileInputStream, StringReader}
import java.nio.charset.Charset
import java.nio.file.{Files, Paths}
import java.util.Properties

import dataclass.data

import scala.collection.JavaConverters._

@data class FileCredentials(
  path: String,
  optional: Boolean = true
) extends Credentials {

  def get(): Seq[DirectCredentials] = {
    val f = Paths.get(path)

    if (Files.isRegularFile(f)) {
      val content = new String(Files.readAllBytes(f), Charset.defaultCharset())
      FileCredentials.parse(content, path)
    } else if (optional)
      Nil
    else
      throw new Exception(s"Credential file $path not found")
  }
}

object FileCredentials {

  def parse(content: String, origin: String): Seq[DirectCredentials] = {

    val props = new Properties
    props.load(new StringReader(content))

    val userProps = props
      .propertyNames()
      .asScala
      .map(_.asInstanceOf[String])
      .filter(_.endsWith(".username"))
      .toVector

    userProps.map { userProp =>
      val prefix = userProp.stripSuffix(".username")

      val user = props.getProperty(userProp)
      val password = Option(props.getProperty(s"$prefix.password")).getOrElse {
        throw new Exception(s"Property $prefix.password not found in $origin")
      }

      val host = Option(props.getProperty(s"$prefix.host")).getOrElse {
        throw new Exception(s"Property $prefix.host not found in $origin")
      }

      val realmOpt = Option(props.getProperty(s"$prefix.realm")) // filter if empty?

      val matchHost = Option(props.getProperty(s"$prefix.auto")).fold(DirectCredentials.defaultMatchHost)(_.toBoolean)
      val httpsOnly = Option(props.getProperty(s"$prefix.https-only")).fold(DirectCredentials.defaultHttpsOnly)(_.toBoolean)
      val passOnRedirect = Option(props.getProperty(s"$prefix.pass-on-redirect")).fold(false)(_.toBoolean)

      DirectCredentials(host, user, password)
        .withRealm(realmOpt)
        .withMatchHost(matchHost)
        .withHttpsOnly(httpsOnly)
        .withPassOnRedirect(passOnRedirect)
    }
  }
}
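From the parse logic above, a credentials file groups properties under an arbitrary prefix, one block per <prefix>.username entry. A sketch with made-up values:

import coursier.credentials.FileCredentials

object CredentialsParseSketch {
  def main(args: Array[String]): Unit = {
    // Each "<prefix>.username" entry defines one credential block;
    // password and host are required, the rest fall back to defaults.
    val content =
      """corp.username=alice
        |corp.password=s3cret
        |corp.host=repo.example.com
        |""".stripMargin
    val creds = FileCredentials.parse(content, "inline")
    println(creds.size) // 1
  }
}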
Example 87
Source File: FileHelper.scala From spark-nlp with Apache License 2.0 | 5 votes |
package com.johnsnowlabs.util

import java.io.{File, IOException}
import java.nio.charset.Charset
import java.nio.file.{Files, Paths}
import java.security.MessageDigest
import java.text.DecimalFormat

import org.apache.commons.io.FileUtils

object FileHelper {
  def writeLines(file: String, lines: Seq[String], encoding: String = "UTF-8"): Unit = {
    val writer = Files.newBufferedWriter(Paths.get(file), Charset.forName(encoding))
    try {
      var cnt = 0
      for (line <- lines) {
        // write the separator before every line after the first, so that
        // consecutive lines are actually separated on disk
        if (cnt > 0) writer.write(System.lineSeparator())
        writer.write(line)
        cnt += 1
      }
    } catch {
      case ex: IOException => ex.printStackTrace()
    } finally if (writer != null) writer.close()
  }

  def delete(file: String, throwOnError: Boolean = false): Unit = {
    val f = new File(file)
    if (f.exists()) {
      try {
        if (f.isDirectory) FileUtils.deleteDirectory(f)
        else FileUtils.deleteQuietly(f)
      } catch {
        case e: Exception =>
          if (throwOnError) throw e
          else FileUtils.forceDeleteOnExit(f)
      }
    }
  }

  def generateChecksum(path: String): String = {
    val arr = Files readAllBytes (Paths get path)
    val checksum = MessageDigest.getInstance("MD5") digest arr
    checksum.map("%02X" format _).mkString
  }

  def getHumanReadableFileSize(size: Long): String = {
    if (size <= 0) return "0"
    val units = Array[String]("B", "KB", "MB", "GB", "TB", "PB", "EB")
    val digitGroups = (Math.log10(size) / Math.log10(1024)).toInt
    new DecimalFormat("#,##0.#").format(size / Math.pow(1024, digitGroups)) + " " + units(digitGroups)
  }
}
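A quick usage sketch (paths are illustrative):

FileHelper.writeLines("/tmp/out.txt", Seq("first", "second"))
println(FileHelper.generateChecksum("/tmp/out.txt"))     // upper-case hex MD5
println(FileHelper.getHumanReadableFileSize(123456789L)) // "117.7 MB"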
Example 88
Source File: ReaderInputStream.scala From better-files with MIT License | 5 votes |
package better.files

import java.io.{InputStream, Reader}
import java.nio.{ByteBuffer, CharBuffer}
import java.nio.charset.{Charset, CharsetEncoder, CoderResult, CodingErrorAction}

import scala.annotation.tailrec

// NOTE: the class header and the reader/encoder/encoderIn declarations were truncated in
// this listing; the declaration below is reconstructed (approximately) from the fields the
// methods use. DefaultBufferSize, DefaultCharset and EOF come from the better.files
// package object.
class ReaderInputStream(reader: Reader, encoder: CharsetEncoder, bufferSize: Int) extends InputStream {

  def this(reader: Reader, bufferSize: Int = DefaultBufferSize)(implicit charset: Charset = DefaultCharset) =
    this(
      reader = reader,
      encoder = charset.newEncoder
        .onMalformedInput(CodingErrorAction.REPLACE)
        .onUnmappableCharacter(CodingErrorAction.REPLACE),
      bufferSize = bufferSize
    )

  private[this] val encoderIn  = CharBuffer.allocate(bufferSize).flip().asInstanceOf[CharBuffer]
  private[this] val encoderOut = ByteBuffer.allocate(bufferSize >> 4).flip().asInstanceOf[ByteBuffer]

  private[this] var lastCoderResult = CoderResult.UNDERFLOW
  private[this] var endOfInput      = false

  private[this] def fillBuffer() = {
    assert(!endOfInput)
    if (lastCoderResult.isUnderflow) {
      val position = encoderIn.compact().position()
      // We don't use Reader#read(CharBuffer) here because it is more efficient to write
      // directly to the underlying char array since the default implementation copies
      // data to a temporary char array anyway
      reader.read(encoderIn.array, position, encoderIn.remaining) match {
        case EOF => endOfInput = true
        case c   => encoderIn.position(position + c)
      }
      encoderIn.flip()
    }
    lastCoderResult = encoder.encode(encoderIn, encoderOut.compact(), endOfInput)
    encoderOut.flip()
  }

  override def read(b: Array[Byte], off: Int, len: Int) = {
    if (len < 0 || off < 0 || (off + len) > b.length)
      throw new IndexOutOfBoundsException("Array Size=" + b.length + ", offset=" + off + ", length=" + len)
    if (len == 0) {
      0 // Always return 0 if len == 0
    } else {
      var read = 0
      @tailrec def loop(off: Int, len: Int): Unit =
        if (len > 0) {
          if (encoderOut.hasRemaining) {
            val c = encoderOut.remaining min len
            encoderOut.get(b, off, c)
            read += c
            loop(off + c, len - c)
          } else if (!endOfInput) {
            fillBuffer()
            loop(off, len)
          }
        }
      loop(off, len)
      if (read == 0 && endOfInput) EOF else read
    }
  }

  @tailrec final override def read() = {
    if (encoderOut.hasRemaining) {
      encoderOut.get & 0xff
    } else if (endOfInput) {
      EOF
    } else {
      fillBuffer()
      read()
    }
  }

  override def close() = reader.close()
}
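A usage sketch, assuming the reconstructed auxiliary constructor above; it exposes a Reader as a byte stream in the chosen charset:

import java.io.StringReader
import java.nio.charset.StandardCharsets

val in = new ReaderInputStream(new StringReader("héllo"))(StandardCharsets.UTF_8)
val bytes = Iterator.continually(in.read()).takeWhile(_ != -1).map(_.toByte).toArray
in.close()
// bytes is the UTF-8 encoding of "héllo" (6 bytes, since é encodes to two)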
Example 89
Source File: WriterOutputStream.scala From better-files with MIT License | 5 votes |
package better.files

import java.io.{OutputStream, Writer}
import java.nio.charset.{Charset, CharsetDecoder, CodingErrorAction}
import java.nio.{ByteBuffer, CharBuffer}

import scala.annotation.tailrec

// NOTE: the class header and the decoderOut declaration were truncated in this listing;
// the declaration below is reconstructed (approximately) from the fields the methods use.
// DefaultBufferSize and DefaultCharset come from the better.files package object.
class WriterOutputStream(writer: Writer, decoder: CharsetDecoder, bufferSize: Int, flushImmediately: Boolean)
    extends OutputStream {

  private[this] val decoderIn  = ByteBuffer.allocate(bufferSize >> 4)
  private[this] val decoderOut = CharBuffer.allocate(bufferSize)

  def this(
      writer: Writer,
      bufferSize: Int = DefaultBufferSize,
      flushImmediately: Boolean = false
  )(implicit charset: Charset = DefaultCharset
  ) =
    this(
      writer = writer,
      decoder = charset.newDecoder
        .onMalformedInput(CodingErrorAction.REPLACE)
        .onUnmappableCharacter(CodingErrorAction.REPLACE)
        .replaceWith("?"),
      bufferSize = bufferSize,
      flushImmediately = flushImmediately
    )

  override def write(b: Array[Byte], off: Int, len: Int) = {
    @tailrec def loop(off: Int, len: Int): Unit =
      if (len > 0) {
        val c = decoderIn.remaining min len
        decoderIn.put(b, off, c)
        processInput(endOfInput = false)
        loop(off + c, len - c)
      }
    loop(off, len)
    if (flushImmediately) flushOutput()
  }

  override def write(b: Int) = write(Array(b.toByte))

  override def flush() = {
    flushOutput()
    writer.flush()
  }

  override def close() = {
    processInput(endOfInput = true)
    flushOutput()
    writer.close()
  }

  private[this] def processInput(endOfInput: Boolean) = {
    decoderIn.flip()
    @tailrec def loop(): Unit = {
      val coderResult = decoder.decode(decoderIn, decoderOut, endOfInput)
      if (coderResult.isOverflow) {
        flushOutput()
        loop()
      } else {
        assert(coderResult.isUnderflow, "decoder is configured to replace malformed input and unmappable characters")
      }
    }
    loop()
    decoderIn.compact()
  }

  private[this] def flushOutput(): Unit = {
    val p = decoderOut.position()
    if (p > 0) {
      writer.write(decoderOut.array, 0, p)
      val _ = decoderOut.rewind()
    }
  }
}
Example 90
Source File: package.scala From better-files with MIT License | 5 votes |
package better import java.io.StreamTokenizer import java.nio.charset.Charset import scala.collection.mutable import scala.util.{Failure, Success, Try} package object files extends Implicits { private[files] def tryAll[A](xs: Seq[A])(f: A => Unit): Unit = { val res = xs.foldLeft(Option.empty[Throwable]) { case (currError, a) => Try(f(a)) match { case Success(_) => currError case Failure(e) => Some(e) } } res.foreach(throwable => throw throwable) } private[files] def toHex(bytes: Array[Byte]): String = String.format("%0" + (bytes.length << 1) + "X", new java.math.BigInteger(1, bytes)) }
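The two helpers above are private[files]; an equivalent check of what toHex produces (input bytes are arbitrary):

val bs = Array[Byte](0, 15, -1)
val hex = String.format("%0" + (bs.length << 1) + "X", new java.math.BigInteger(1, bs))
assert(hex == "000FFF") // two zero-padded hex digits per input byte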
Example 91
Source File: EncodingBenchmark.scala From better-files with MIT License | 5 votes |
package better.files.benchmarks import better.files._ import java.nio.charset.Charset import scala.util.Random class EncodingBenchmark extends Benchmark { def testWrite(file: File, charset: Charset) = profile { for { writer <- file.bufferedWriter(charset) content <- Iterator.continually(Random.nextString(10000)).take(1000) } writer.write(content + "\n") } def testRead(file: File, charset: Charset) = profile { for { reader <- file.bufferedReader(charset) line <- reader.lines().autoClosed } line } def run(charset: Charset) = { File.temporaryFile() foreach { file => val (_, w) = testWrite(file, charset) info(s"Charset=$charset, write=$w ms") val (_, r) = testRead(file, charset) info(s"Charset=$charset, read=$r ms") } } test("encoding") { val utf8 = Charset.forName("UTF-8") run(charset = utf8) info("-------------") run(charset = UnicodeCharset(utf8)) } }
Example 92
Source File: Conversions.scala From graphcool-framework with Apache License 2.0 | 5 votes |
package cool.graph.messagebus import java.nio.charset.Charset import play.api.libs.json._ object Conversions { type Converter[S, T] = S => T type ByteMarshaller[T] = Converter[T, Array[Byte]] type ByteUnmarshaller[T] = Converter[Array[Byte], T] object Marshallers { val FromString: ByteMarshaller[String] = (msg: String) => msg.getBytes("utf-8") def FromJsonBackedType[T]()(implicit writes: Writes[T]): ByteMarshaller[T] = msg => { val jsonString = Json.toJson(msg).toString() FromString(jsonString) } } object Unmarshallers { val ToString: ByteUnmarshaller[String] = (bytes: Array[Byte]) => new String(bytes, Charset.forName("UTF-8")) def ToJsonBackedType[T]()(implicit reads: Reads[T]): ByteUnmarshaller[T] = msg => { Json.parse(msg).validate[T] match { case JsSuccess(v, _) => v case JsError(err) => throw new Exception(s"Invalid json message format: $err") } } } }
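A round-trip sketch for the marshallers above; the Ping case class is illustrative:

import play.api.libs.json.{Format, Json}

case class Ping(id: Int)
implicit val fmt: Format[Ping] = Json.format[Ping]

val marshal: Conversions.ByteMarshaller[Ping] = Conversions.Marshallers.FromJsonBackedType[Ping]()
val unmarshal: Conversions.ByteUnmarshaller[Ping] = Conversions.Unmarshallers.ToJsonBackedType[Ping]()
val back = unmarshal(marshal(Ping(1))) // Ping(1): bytes are written and read back as UTF-8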
Example 93
Source File: FileReader.scala From ncdbg with BSD 3-Clause "New" or "Revised" License | 5 votes |
package com.programmaticallyspeaking.ncd.infra import java.io.File import java.nio.charset.Charset import java.nio.file.Files import scala.util.Try trait FileReader { def read(file: File, charset: Charset): Try[String] } class FileSystemFileReader extends FileReader { override def read(file: File, charset: Charset): Try[String] = { Try { val bytes = Files.readAllBytes(file.toPath) new String(bytes, charset) } } }
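A usage sketch (path is illustrative); the Try keeps I/O failures out of exception flow:

import java.io.File
import java.nio.charset.StandardCharsets
import scala.util.{Failure, Success}

new FileSystemFileReader().read(new File("/tmp/script.js"), StandardCharsets.UTF_8) match {
  case Success(src) => println(src.take(80))
  case Failure(err) => println(s"could not read: $err")
}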
Example 94
Source File: OAuth.scala From iot-demo with Apache License 2.0 | 5 votes |
package core

import java.net.URLEncoder
import java.nio.charset.Charset
import javax.crypto

import org.parboiled.common.Base64
import org.slf4j.LoggerFactory
import spray.http.HttpHeaders.RawHeader
import spray.http.{ContentType, HttpEntity, HttpRequest, MediaTypes}

import scala.collection.immutable.TreeMap

object OAuth {
  private[this] val log = LoggerFactory.getLogger(getClass)

  def oAuthAuthorizer(consumer: Consumer, token: Token): HttpRequest => HttpRequest = {
    // construct the key and cryptographic entity
    val SHA1 = "HmacSHA1"
    val keyString = percentEncode(consumer.secret :: token.secret :: Nil)
    val key = new crypto.spec.SecretKeySpec(bytes(keyString), SHA1)
    val mac = crypto.Mac.getInstance(SHA1)

    { httpRequest: HttpRequest =>
      val timestamp = (System.currentTimeMillis / 1000).toString
      // nonce is unique enough for our purposes here
      val nonce = System.nanoTime.toString

      // pick out x-www-form-urlencoded body
      val (requestParams, newEntity) = httpRequest.entity match {
        case request @ HttpEntity.NonEmpty(ContentType(MediaTypes.`application/x-www-form-urlencoded`, _), data) =>
          log.info("request {}", request)
          val params = data.asString.split("&")
          val pairs = params.map { param =>
            val p = param.split("=")
            p(0) -> percentEncode(p(1))
          }
          (pairs.toMap,
            HttpEntity(ContentType(MediaTypes.`application/x-www-form-urlencoded`),
              "%s=%s" format(pairs(0)._1, pairs(0)._2)))
        case e => (Map(), e)
      }

      // prepare the OAuth parameters
      val oauthParams = Map(
        "oauth_consumer_key" -> consumer.key,
        "oauth_signature_method" -> "HMAC-SHA1",
        "oauth_timestamp" -> timestamp,
        "oauth_nonce" -> nonce,
        "oauth_token" -> token.value,
        "oauth_version" -> "1.0"
      )

      // construct parts of the signature base string
      val encodedOrderedParams = (TreeMap[String, String]() ++ oauthParams ++ requestParams) map { case (k, v) => k + "=" + v } mkString "&"
      val url = httpRequest.uri.toString()
      // construct the signature base string
      val signatureBaseString = percentEncode(httpRequest.method.toString() :: url :: encodedOrderedParams :: Nil)

      mac.init(key)
      val sig = Base64.rfc2045().encodeToString(mac.doFinal(bytes(signatureBaseString)), false)
      mac.reset()

      val oauth = TreeMap[String, String]() ++ (oauthParams + ("oauth_signature" -> percentEncode(sig))) map { case (k, v) => "%s=\"%s\"" format(k, v) } mkString ", "

      // return the signed request
      httpRequest.withHeaders(List(RawHeader("Authorization", "OAuth " + oauth))).withEntity(newEntity)
    }
  }

  private def percentEncode(str: String): String = URLEncoder.encode(str, "UTF-8") replace("+", "%20") replace("%7E", "~")

  private def percentEncode(s: Seq[String]): String = s map percentEncode mkString "&"

  private def bytes(str: String) = str.getBytes(Charset.forName("UTF-8"))

  case class Consumer(key: String, secret: String)

  case class Token(value: String, secret: String)
}
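A signing sketch (the consumer key/secret and token are placeholders, and the URL is just an example of an OAuth 1.0a endpoint):

import spray.http.{HttpRequest, Uri}

val sign = OAuth.oAuthAuthorizer(
  OAuth.Consumer("consumer-key", "consumer-secret"),
  OAuth.Token("access-token", "token-secret"))
val signed = sign(HttpRequest(uri = Uri("https://api.twitter.com/1.1/statuses/user_timeline.json")))
// signed carries an "Authorization: OAuth ..." header with the HMAC-SHA1 signature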
Example 95
Source File: Using.scala From Argus-SAF with Apache License 2.0 | 5 votes |
package org.argus.jawa.core.compiler.compile.io import java.io.{Closeable, FileInputStream, FileOutputStream, InputStream, OutputStream, File => JavaFile} import java.io.{BufferedInputStream, BufferedOutputStream, InputStreamReader, OutputStreamWriter} import java.io.{BufferedReader, BufferedWriter} import java.util.zip.GZIPInputStream import java.net.URL import java.nio.channels.FileChannel import java.nio.charset.Charset import java.util.jar.{JarFile, JarInputStream, JarOutputStream} import java.util.zip.{GZIPOutputStream, ZipEntry, ZipFile, ZipInputStream, ZipOutputStream} import ErrorHandling.translate import scala.reflect.{Manifest => SManifest} abstract class Using[Source, T] { protected def open(src: Source): T def apply[R](src: Source)(f: T => R): R = { val resource = open(src) try { f(resource) } finally { close(resource) } } protected def close(out: T): Unit } abstract class WrapUsing[Source, T](implicit srcMf: SManifest[Source], targetMf: SManifest[T]) extends Using[Source, T] { protected def label[S](m: SManifest[S]): String = m.runtimeClass.getSimpleName protected def openImpl(source: Source): T protected final def open(source: Source): T = translate("Error wrapping " + label(srcMf) + " in " + label(targetMf) + ": ") { openImpl(source) } } trait OpenFile[T] extends Using[JavaFile, T] { protected def openImpl(file: JavaFile): T protected final def open(file: JavaFile): T = { val parent = file.getParentFile if(parent != null) IO.createDirectory(parent) openImpl(file) } } object Using { def wrap[Source, T<: Closeable](openF: Source => T)(implicit srcMf: SManifest[Source], targetMf: SManifest[T]): Using[Source,T] = wrap(openF, closeCloseable) def wrap[Source, T](openF: Source => T, closeF: T => Unit)(implicit srcMf: SManifest[Source], targetMf: SManifest[T]): Using[Source,T] = new WrapUsing[Source, T] { def openImpl(source: Source): T = openF(source) def close(t: T): Unit = closeF(t) } def resource[Source, T <: Closeable](openF: Source => T): Using[Source,T] = resource(openF, closeCloseable) def resource[Source, T](openF: Source => T, closeF: T => Unit): Using[Source,T] = new Using[Source,T] { def open(s: Source): T = openF(s) def close(s: T): Unit = closeF(s) } def file[T <: Closeable](openF: JavaFile => T): OpenFile[T] = file(openF, closeCloseable) def file[T](openF: JavaFile => T, closeF: T => Unit): OpenFile[T] = new OpenFile[T] { def openImpl(file: JavaFile): T = openF(file) def close(t: T): Unit = closeF(t) } private def closeCloseable[T <: Closeable]: T => Unit = _.close() def bufferedOutputStream: Using[OutputStream, BufferedOutputStream] = wrap((out: OutputStream) => new BufferedOutputStream(out) ) def bufferedInputStream: Using[InputStream, BufferedInputStream] = wrap((in: InputStream) => new BufferedInputStream(in) ) def fileOutputStream(append: Boolean = false): OpenFile[BufferedOutputStream] = file(f => new BufferedOutputStream(new FileOutputStream(f, append))) def fileInputStream: OpenFile[BufferedInputStream] = file(f => new BufferedInputStream(new FileInputStream(f))) def urlInputStream: Using[URL, BufferedInputStream] = resource((u: URL) => translate("Error opening " + u + ": ")(new BufferedInputStream(u.openStream))) def fileOutputChannel: OpenFile[FileChannel] = file(f => new FileOutputStream(f).getChannel) def fileInputChannel: OpenFile[FileChannel] = file(f => new FileInputStream(f).getChannel) def fileWriter(charset: Charset = IO.utf8, append: Boolean = false): OpenFile[BufferedWriter] = file(f => new BufferedWriter(new OutputStreamWriter(new 
FileOutputStream(f, append), charset)) ) def fileReader(charset: Charset): OpenFile[BufferedReader] = file(f => new BufferedReader(new InputStreamReader(new FileInputStream(f), charset)) ) def urlReader(charset: Charset): Using[URL, BufferedReader] = resource((u: URL) => new BufferedReader(new InputStreamReader(u.openStream, charset))) def jarFile(verify: Boolean): OpenFile[JarFile] = file(f => new JarFile(f, verify), (_: JarFile).close()) def zipFile: OpenFile[ZipFile] = file(f => new ZipFile(f), (_: ZipFile).close()) def streamReader: Using[(InputStream, Charset), InputStreamReader] = wrap{ (_: (InputStream, Charset)) match { case (in, charset) => new InputStreamReader(in, charset) } } def gzipInputStream: Using[InputStream, GZIPInputStream] = wrap((in: InputStream) => new GZIPInputStream(in, 8192) ) def zipInputStream: Using[InputStream, ZipInputStream] = wrap((in: InputStream) => new ZipInputStream(in)) def zipOutputStream: Using[OutputStream, ZipOutputStream] = wrap((out: OutputStream) => new ZipOutputStream(out)) def gzipOutputStream: Using[OutputStream, GZIPOutputStream] = wrap((out: OutputStream) => new GZIPOutputStream(out, 8192), (_: GZIPOutputStream).finish()) def jarOutputStream: Using[OutputStream, JarOutputStream] = wrap((out: OutputStream) => new JarOutputStream(out)) def jarInputStream: Using[InputStream, JarInputStream] = wrap((in: InputStream) => new JarInputStream(in)) def zipEntry(zip: ZipFile): Using[ZipEntry, InputStream] = resource((entry: ZipEntry) => translate("Error opening " + entry.getName + " in " + zip + ": ") { zip.getInputStream(entry) } ) }
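A usage sketch of the combinators (the file name is illustrative; IO.utf8 is the same charset constant the defaults above use):

val lineCount = Using.fileReader(IO.utf8)(new java.io.File("build.sbt")) { reader =>
  Iterator.continually(reader.readLine()).takeWhile(_ != null).size
} // the reader is closed even if the body throws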
Example 96
Source File: FinaglePostgresDecoders.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.finagle.postgres import java.nio.charset.Charset import java.time.{ LocalDate, LocalDateTime, ZoneId } import java.util.{ Date, UUID } import com.twitter.finagle.postgres.values.ValueDecoder import com.twitter.util.Return import com.twitter.util.Throw import com.twitter.util.Try import io.getquill.FinaglePostgresContext import io.getquill.util.Messages.fail import io.netty.buffer.ByteBuf trait FinaglePostgresDecoders { this: FinaglePostgresContext[_] => import ValueDecoder._ type Decoder[T] = FinaglePostgresDecoder[T] case class FinaglePostgresDecoder[T]( vd: ValueDecoder[T], default: Throwable => T = (e: Throwable) => fail(e.getMessage) ) extends BaseDecoder[T] { override def apply(index: Index, row: ResultRow): T = row.getTry[T](index)(vd) match { case Return(r) => r case Throw(e) => default(e) } def orElse[U](f: U => T)(implicit vdu: ValueDecoder[U]): FinaglePostgresDecoder[T] = { val mappedVd = vdu.map[T](f) FinaglePostgresDecoder[T]( new ValueDecoder[T] { def decodeText(recv: String, text: String): Try[T] = { val t = vd.decodeText(recv, text) if (t.isReturn) t else mappedVd.decodeText(recv, text) } def decodeBinary(recv: String, bytes: ByteBuf, charset: Charset): Try[T] = { val t = vd.decodeBinary(recv, bytes, charset) if (t.isReturn) t else mappedVd.decodeBinary(recv, bytes, charset) } } ) } } implicit def decoderDirectly[T](implicit vd: ValueDecoder[T]): Decoder[T] = FinaglePostgresDecoder(vd) def decoderMapped[U, T](f: U => T)(implicit vd: ValueDecoder[U]): Decoder[T] = FinaglePostgresDecoder(vd.map[T](f)) implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] = FinaglePostgresDecoder[Option[T]]( new ValueDecoder[Option[T]] { def decodeText(recv: String, text: String): Try[Option[T]] = Return(d.vd.decodeText(recv, text).toOption) def decodeBinary(recv: String, bytes: ByteBuf, charset: Charset): Try[Option[T]] = Return(d.vd.decodeBinary(recv, bytes, charset).toOption) }, _ => None ) implicit def mappedDecoder[I, O](implicit mapped: MappedEncoding[I, O], d: Decoder[I]): Decoder[O] = decoderMapped[I, O](mapped.f)(d.vd) implicit val stringDecoder: Decoder[String] = decoderDirectly[String] implicit val bigDecimalDecoder: Decoder[BigDecimal] = decoderDirectly[BigDecimal] implicit val booleanDecoder: Decoder[Boolean] = decoderDirectly[Boolean] implicit val shortDecoder: Decoder[Short] = decoderDirectly[Short] implicit val byteDecoder: Decoder[Byte] = decoderMapped[Short, Byte](_.toByte) implicit val intDecoder: Decoder[Int] = decoderDirectly[Int].orElse[Long](_.toInt) implicit val longDecoder: Decoder[Long] = decoderDirectly[Long].orElse[Int](_.toLong) implicit val floatDecoder: Decoder[Float] = decoderDirectly[Float].orElse[Double](_.toFloat) implicit val doubleDecoder: Decoder[Double] = decoderDirectly[Double] implicit val byteArrayDecoder: Decoder[Array[Byte]] = decoderDirectly[Array[Byte]] implicit val dateDecoder: Decoder[Date] = decoderMapped[LocalDateTime, Date](d => Date.from(d.atZone(ZoneId.systemDefault()).toInstant)) implicit val localDateDecoder: Decoder[LocalDate] = decoderDirectly[LocalDate].orElse[LocalDateTime](_.toLocalDate) implicit val localDateTimeDecoder: Decoder[LocalDateTime] = decoderDirectly[LocalDateTime].orElse[LocalDate](_.atStartOfDay) implicit val uuidDecoder: Decoder[UUID] = decoderDirectly[UUID] }
Example 97
Source File: JAsyncContextConfig.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.jasync import java.nio.charset.Charset import java.lang.{ Long => JavaLong } import scala.jdk.CollectionConverters._ import scala.util.Try import com.github.jasync.sql.db.ConcreteConnection import com.github.jasync.sql.db.{ ConnectionPoolConfiguration, ConnectionPoolConfigurationBuilder } import com.github.jasync.sql.db.pool.ConnectionPool import com.github.jasync.sql.db.Configuration import com.github.jasync.sql.db.SSLConfiguration import com.github.jasync.sql.db.pool.ObjectFactory import com.github.jasync.sql.db.util.AbstractURIParser import com.typesafe.config.Config abstract class JAsyncContextConfig[C <: ConcreteConnection]( config: Config, connectionFactory: Configuration => ObjectFactory[C], uriParser: AbstractURIParser ) { private def getValue[T](path: String, getter: String => T) = Try(getter(path)) private def getString(path: String) = getValue(path, config.getString).toOption private def getInt(path: String) = getValue(path, config.getInt).toOption private def getLong(path: String) = getValue(path, config.getLong).toOption private lazy val urlConfiguration: Configuration = getValue("url", config.getString) .map(uriParser.parseOrDie(_, uriParser.getDEFAULT.getCharset)) .getOrElse(uriParser.getDEFAULT) private lazy val default = new ConnectionPoolConfigurationBuilder().build() lazy val connectionPoolConfiguration = new ConnectionPoolConfiguration( getString("host").getOrElse(urlConfiguration.getHost), getInt("port").getOrElse(urlConfiguration.getPort), getString("database").orElse(Option(urlConfiguration.getDatabase)).orNull, getString("username").getOrElse(urlConfiguration.getUsername), getString("password").orElse(Option(urlConfiguration.getPassword)).orNull, getInt("maxActiveConnections").getOrElse(default.getMaxActiveConnections), getLong("maxIdleTime").getOrElse(default.getMaxIdleTime), getInt("maxPendingQueries").getOrElse(default.getMaxPendingQueries), getLong("connectionValidationInterval").getOrElse(default.getConnectionValidationInterval), getLong("connectionCreateTimeout").getOrElse(default.getConnectionCreateTimeout), getLong("connectionTestTimeout").getOrElse(default.getConnectionTestTimeout), getLong("queryTimeout") .orElse(Option(urlConfiguration.getQueryTimeout).map(_.toMillis)).map(JavaLong.valueOf).orNull, urlConfiguration.getEventLoopGroup, urlConfiguration.getExecutionContext, default.getCoroutineDispatcher, new SSLConfiguration( Map( "sslmode" -> getString("sslmode"), "sslrootcert" -> getString("sslrootcert") ).collect { case (key, Some(value)) => key -> value }.asJava ), Try(Charset.forName(config.getString("charset"))).getOrElse(urlConfiguration.getCharset), getInt("maximumMessageSize").getOrElse(urlConfiguration.getMaximumMessageSize), urlConfiguration.getAllocator, getString("applicationName").orElse(Option(urlConfiguration.getApplicationName)).orNull, urlConfiguration.getInterceptors, getLong("maxConnectionTtl").map(JavaLong.valueOf).orElse(Option(default.getMaxConnectionTtl)).orNull ) def pool = new ConnectionPool[C]( connectionFactory(connectionPoolConfiguration.getConnectionConfiguration), connectionPoolConfiguration ) }
Example 98
Source File: MysqlJAsyncContextConfigSpec.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.jasync.mysql import java.nio.charset.Charset import com.typesafe.config.ConfigFactory import io.getquill.{ MysqlJAsyncContextConfig, Spec } import scala.jdk.CollectionConverters._ class MysqlJAsyncContextConfigSpec extends Spec { "extracts valid data from configs" in { val c = ConfigFactory.parseMap(Map( "url" -> "jdbc:postgresql://github.com:5233/db?user=p", "pass" -> "pass", "queryTimeout" -> "123", "host" -> "github.com", "port" -> "5233", "charset" -> "UTF-8", "username" -> "p", "password" -> "pass", "maximumMessageSize" -> "456", "connectionTestTimeout" -> "789" ).asJava) val conf = MysqlJAsyncContextConfig(c).connectionPoolConfiguration conf.getQueryTimeout mustBe 123L conf.getConnectionTestTimeout mustBe 789L conf.getMaximumMessageSize mustBe 456 conf.getCharset mustBe Charset.forName("UTF-8") conf.getHost mustBe "github.com" conf.getPort mustBe 5233 conf.getUsername mustBe "p" conf.getPassword mustBe "pass" } "parses url and passes valid data to configuration" in { val c = ConfigFactory.parseMap(Map( "url" -> "jdbc:mysql://host:5233/db?user=p", "pass" -> "pass", "queryTimeout" -> "123", "host" -> "github.com", "port" -> "5233", "charset" -> "UTF-8", "password" -> "pass", "maximumMessageSize" -> "456", "connectionTestTimeout" -> "789" ).asJava) val conf = MysqlJAsyncContextConfig(c).connectionPoolConfiguration conf.getQueryTimeout mustBe 123L conf.getConnectionTestTimeout mustBe 789L conf.getMaximumMessageSize mustBe 456 conf.getCharset mustBe Charset.forName("UTF-8") conf.getHost mustBe "github.com" conf.getPort mustBe 5233 conf.getUsername mustBe "p" conf.getPassword mustBe "pass" } }
Example 99
Source File: MysqlAsyncContextConfigSpec.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.async.mysql import java.nio.charset.Charset import com.typesafe.config.ConfigFactory import io.getquill.{ MysqlAsyncContextConfig, Spec } import scala.jdk.CollectionConverters._ import scala.concurrent.duration._ class MysqlAsyncContextConfigSpec extends Spec { "extracts valid data from configs" in { val c = ConfigFactory.parseMap(Map( "url" -> "jdbc:postgresql://github.com:5233/db?user=p", "pass" -> "pass", "queryTimeout" -> "123 s", "host" -> "github.com", "port" -> "5233", "charset" -> "UTF-8", "user" -> "p", "password" -> "pass", "maximumMessageSize" -> "456", "connectTimeout" -> "789 s" ).asJava) val conf = MysqlAsyncContextConfig(c) conf.queryTimeout mustBe Some(123.seconds) conf.connectTimeout mustBe Some(789.seconds) conf.maximumMessageSize mustBe Some(456) conf.charset mustBe Some(Charset.forName("UTF-8")) conf.host mustBe Some("github.com") conf.port mustBe Some(5233) conf.user mustBe Some("p") conf.password mustBe Some("pass") } "parses url and passes valid data to configuration" in { val c = ConfigFactory.parseMap(Map( "url" -> "jdbc:mysql://host:5233/db?user=p", "pass" -> "pass", "queryTimeout" -> "123 s", "host" -> "github.com", "port" -> "5233", "charset" -> "UTF-8", "password" -> "pass", "maximumMessageSize" -> "456", "connectTimeout" -> "789 s" ).asJava) val conf = MysqlAsyncContextConfig(c) conf.configuration.queryTimeout mustBe Some(123.seconds) conf.configuration.connectTimeout mustBe 789.seconds conf.configuration.maximumMessageSize mustBe 456 conf.configuration.charset mustBe Charset.forName("UTF-8") conf.configuration.host mustBe "github.com" conf.configuration.port mustBe 5233 conf.configuration.username mustBe "p" conf.configuration.password mustBe Some("pass") } }
Example 100
Source File: AsyncContextConfig.scala From quill with Apache License 2.0 | 5 votes |
package io.getquill.context.async import java.nio.charset.Charset import com.github.mauricio.async.db.Configuration import com.github.mauricio.async.db.Connection import com.github.mauricio.async.db.SSLConfiguration import com.github.mauricio.async.db.pool.ObjectFactory import com.github.mauricio.async.db.pool.PartitionedConnectionPool import com.github.mauricio.async.db.pool.PoolConfiguration import com.github.mauricio.async.db.util.AbstractURIParser import com.typesafe.config.Config import scala.concurrent.duration.Duration import scala.util.Try abstract class AsyncContextConfig[C <: Connection]( config: Config, connectionFactory: Configuration => ObjectFactory[C], uriParser: AbstractURIParser ) { def url = Try(config.getString("url")).toOption def user = Try(config.getString("user")).toOption def password = Try(config.getString("password")).toOption def database = Try(config.getString("database")).toOption def port = Try(config.getInt("port")).toOption def host = Try(config.getString("host")).toOption def sslProps = Map( "sslmode" -> Try(config.getString("sslmode")).toOption, "sslrootcert" -> Try(config.getString("sslrootcert")).toOption ).collect { case (key, Some(value)) => key -> value } def charset = Try(Charset.forName(config.getString("charset"))).toOption def maximumMessageSize = Try(config.getInt("maximumMessageSize")).toOption def connectTimeout = Try(Duration(config.getString("connectTimeout"))).toOption def testTimeout = Try(Duration(config.getString("testTimeout"))).toOption def queryTimeout = Try(Duration(config.getString("queryTimeout"))).toOption def configuration = { var c = url match { case Some(url) => uriParser.parseOrDie(url) case _ => uriParser.DEFAULT } user.foreach(p => c = c.copy(username = p)) if (password.nonEmpty) { c = c.copy(password = password) } if (database.nonEmpty) { c = c.copy(database = database) } port.foreach(p => c = c.copy(port = p)) host.foreach(p => c = c.copy(host = p)) c = c.copy(ssl = SSLConfiguration(sslProps)) charset.foreach(p => c = c.copy(charset = p)) maximumMessageSize.foreach(p => c = c.copy(maximumMessageSize = p)) connectTimeout.foreach(p => c = c.copy(connectTimeout = p)) testTimeout.foreach(p => c = c.copy(testTimeout = p)) c = c.copy(queryTimeout = queryTimeout) c } private val defaultPoolConfig = PoolConfiguration.Default def poolMaxObjects = Try(config.getInt("poolMaxObjects")).getOrElse(defaultPoolConfig.maxObjects) def poolMaxIdle = Try(config.getLong("poolMaxIdle")).getOrElse(defaultPoolConfig.maxIdle) def poolMaxQueueSize = Try(config.getInt("poolMaxQueueSize")).getOrElse(defaultPoolConfig.maxQueueSize) def poolValidationInterval = Try(config.getLong("poolValidationInterval")).getOrElse(defaultPoolConfig.validationInterval) def poolConfiguration = PoolConfiguration( maxObjects = poolMaxObjects, maxIdle = poolMaxIdle, maxQueueSize = poolMaxQueueSize, validationInterval = poolValidationInterval ) def numberOfPartitions = Try(config.getInt("poolNumberOfPartitions")).getOrElse(4) def pool = new PartitionedConnectionPool[C]( connectionFactory(configuration), poolConfiguration, numberOfPartitions ) }
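A sketch of a matching Typesafe Config block; every key mirrors one of the getters above, and anything omitted falls back to the parsed url or the library defaults:

import com.typesafe.config.ConfigFactory

val cfg = ConfigFactory.parseString(
  """host = "localhost"
    |port = 5432
    |user = "app"
    |password = "secret"
    |database = "app_db"
    |charset = "UTF-8"
    |connectTimeout = "5 s"
    |poolMaxObjects = 16
    |""".stripMargin)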
Example 101
Source File: JsonMQDeserializer.scala From seahorse with Apache License 2.0 | 5 votes |
package ai.deepsense.workflowexecutor.communication.mq.json import java.nio.charset.Charset import spray.json._ import ai.deepsense.workflowexecutor.communication.mq.MQDeserializer import ai.deepsense.workflowexecutor.communication.mq.json.Constants.JsonKeys._ class JsonMQDeserializer( jsonDeserializers: Seq[JsonMessageDeserializer], parent: Option[JsonMQDeserializer] = None) extends MQDeserializer with JsonMessageDeserializer { private val combinedJsonDeserializers = { jsonDeserializers.tail.foldLeft(jsonDeserializers.head.deserialize) { case (acc, deserializer) => acc.orElse(deserializer.deserialize) } } override val deserialize: PartialFunction[(String, JsObject), Any] = { parent match { case Some(p) => combinedJsonDeserializers.orElse(p.deserialize) case None => combinedJsonDeserializers } } override def deserializeMessage(data: Array[Byte]): Any = { val json = new String(data, Global.charset).parseJson val jsObject = json.asJsObject val fields = jsObject.fields import spray.json.DefaultJsonProtocol._ val messageType = getField(fields, messageTypeKey).convertTo[String] val body = getField(fields, messageBodyKey).asJsObject() deserialize(messageType, body) } def orElse(next: JsonMQDeserializer): JsonMQDeserializer = new JsonMQDeserializer(jsonDeserializers, Some(next)) private def getField(fields: Map[String, JsValue], fieldName: String): JsValue = { try { fields(fieldName) } catch { case e: NoSuchElementException => throw new DeserializationException(s"Missing field: $fieldName", e) } } }
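The wire format deserializeMessage expects is a JSON envelope with messageType and messageBody fields (the key names are confirmed by the serializer spec in Example 104 below); the body fields here are illustrative:

val raw = """{"messageType": "heartbeat", "messageBody": {"workflowId": "1234"}}"""
  .getBytes(Global.charset)
// deserializer.deserializeMessage(raw) parses the envelope and dispatches on
// "heartbeat" to whichever JsonMessageDeserializer in the chain handles it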
Example 102
Source File: Global.scala From seahorse with Apache License 2.0 | 5 votes |
package ai.deepsense.workflowexecutor.communication.mq.json import java.nio.charset.Charset import ai.deepsense.deeplang.CatalogRecorder import ai.deepsense.models.json.graph.GraphJsonProtocol.GraphReader import ai.deepsense.models.json.workflow.InferredStateJsonProtocol import ai.deepsense.models.json.workflow.InferredStateJsonProtocol._ import ai.deepsense.models.json.workflow.ExecutionReportJsonProtocol._ import ai.deepsense.models.workflows.{ExecutionReport, InferredState} import ai.deepsense.workflowexecutor.communication.message.global._ import ai.deepsense.workflowexecutor.communication.message.global.HeartbeatJsonProtocol._ import ai.deepsense.workflowexecutor.communication.message.global.PoisonPillJsonProtocol._ import ai.deepsense.workflowexecutor.communication.message.global.ReadyJsonProtocol._ import ai.deepsense.workflowexecutor.communication.message.global.LaunchJsonProtocol._ object Global { val charset = Charset.forName("UTF-8") val dOperationsCatalog = CatalogRecorder.resourcesCatalogRecorder.catalogs.operations val graphReader = new GraphReader(dOperationsCatalog) val inferredStateJsonProtocol = InferredStateJsonProtocol(graphReader) import inferredStateJsonProtocol._ import Constants.MessagesTypes._ object HeartbeatDeserializer extends DefaultJsonMessageDeserializer[Heartbeat](heartbeat) object HeartbeatSerializer extends DefaultJsonMessageSerializer[Heartbeat](heartbeat) object PoisonPillDeserializer extends DefaultJsonMessageDeserializer[PoisonPill](poisonPill) object PoisonPillSerializer extends DefaultJsonMessageSerializer[PoisonPill](poisonPill) object ReadyDeserializer extends DefaultJsonMessageDeserializer[Ready](ready) object ReadySerializer extends DefaultJsonMessageSerializer[Ready](ready) object LaunchDeserializer extends DefaultJsonMessageDeserializer[Launch](launch) object LaunchSerializer extends DefaultJsonMessageSerializer[Launch](launch) object ExecutionReportSerializer extends DefaultJsonMessageSerializer[ExecutionReport](executionReport) object ExecutionReportDeserializer extends DefaultJsonMessageDeserializer[ExecutionReport](executionReport) object InferredStateSerializer extends DefaultJsonMessageSerializer[InferredState](inferredState) object InferredStateDeserializer extends DefaultJsonMessageDeserializer[InferredState](inferredState) object GlobalMQSerializer extends JsonMQSerializer( Seq(HeartbeatSerializer, PoisonPillSerializer, ReadySerializer, LaunchSerializer, ExecutionReportSerializer, InferredStateSerializer )) object GlobalMQDeserializer extends JsonMQDeserializer( Seq(HeartbeatDeserializer, PoisonPillDeserializer, ReadyDeserializer, LaunchDeserializer, ExecutionReportDeserializer, InferredStateDeserializer )) }
Example 103
Source File: JsonMQSerializer.scala From seahorse with Apache License 2.0 | 5 votes |
package ai.deepsense.workflowexecutor.communication.mq.json import java.nio.charset.Charset import spray.json.JsObject import ai.deepsense.workflowexecutor.communication.mq.MQSerializer class JsonMQSerializer( jsonSerializers: Seq[JsonMessageSerializer], parent: Option[JsonMQSerializer] = None ) extends MQSerializer with JsonMessageSerializer { private val combinedJsonSerializers = { jsonSerializers.tail.foldLeft(jsonSerializers.head.serialize) { case (acc, serializer) => acc.orElse(serializer.serialize) } } override val serialize: PartialFunction[Any, JsObject] = { parent match { case Some(p) => combinedJsonSerializers.orElse(p.serialize) case None => combinedJsonSerializers } } override def serializeMessage(message: Any): Array[Byte] = { serialize(message).compactPrint.getBytes(Global.charset) } def orElse(next: JsonMQSerializer): JsonMQSerializer = new JsonMQSerializer(jsonSerializers, Some(next)) }
Example 104
Source File: ProtocolJsonSerializerSpec.scala From seahorse with Apache License 2.0 | 5 votes |
package ai.deepsense.workflowexecutor.communication.mq.serialization.json import java.nio.charset.Charset import org.scalatest.mockito.MockitoSugar import spray.json._ import ai.deepsense.commons.StandardSpec import ai.deepsense.commons.models.Entity import ai.deepsense.deeplang.DOperable import ai.deepsense.deeplang.doperables.ColumnsFilterer import ai.deepsense.graph._ import ai.deepsense.models.json.graph.GraphJsonProtocol.GraphReader import ai.deepsense.models.json.workflow.{ExecutionReportJsonProtocol, InferredStateJsonProtocol, WorkflowWithResultsJsonProtocol} import ai.deepsense.models.workflows._ import ai.deepsense.reportlib.model.factory.ReportContentTestFactory import ai.deepsense.workflowexecutor.communication.message.global._ import ai.deepsense.workflowexecutor.communication.message.workflow.Synchronize class ProtocolJsonSerializerSpec extends StandardSpec with MockitoSugar with WorkflowWithResultsJsonProtocol with InferredStateJsonProtocol with HeartbeatJsonProtocol { override val graphReader: GraphReader = mock[GraphReader] "ProtocolJsonSerializer" should { val protocolJsonSerializer = ProtocolJsonSerializer(graphReader) "serialize Synchronize messages" in { protocolJsonSerializer.serializeMessage(Synchronize()) shouldBe expectedSerializationResult("synchronize", JsObject()) } } private def expectedSerializationResult(messageType: String, jsonObject: JsValue): Array[Byte] = { JsObject( "messageType" -> JsString(messageType), "messageBody" -> jsonObject ).compactPrint.getBytes(Charset.forName("UTF-8")) } }
Example 105
Source File: EventPluginUtilities.scala From tensorflow_scala with Apache License 2.0 | 5 votes |
package org.platanios.tensorflow.api.io.events

import org.platanios.tensorflow.api.core.exception.{InvalidArgumentException, NotFoundException}

import java.nio.charset.Charset
import java.nio.file.{Files, Path}

import scala.jdk.CollectionConverters._

// NOTE: the enclosing object declaration and the pluginDir helper were truncated in this
// listing; they are reconstructed (approximately) here so the snippet is self-contained.
object EventPluginUtilities {
  private def pluginDir(logDir: Path, pluginName: String): Path =
    logDir.resolve("plugins").resolve(pluginName)

  def retrievePluginAsset(logDir: Path, pluginName: String, assetName: String): String = {
    val assetPath = pluginDir(logDir, pluginName).resolve(assetName)
    try {
      new String(Files.readAllBytes(assetPath), Charset.forName("UTF-8"))
    } catch {
      case _: NotFoundException => throw InvalidArgumentException(s"Asset path '$assetPath' not found.")
      case t: Throwable => throw InvalidArgumentException(s"Could not read asset path '$assetPath'.", t)
    }
  }
}
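Usage sketch (the log directory and asset names are illustrative):

import java.nio.file.Paths

val asset = EventPluginUtilities.retrievePluginAsset(
  Paths.get("/tmp/tensorboard-logs"), pluginName = "projector", assetName = "config.json")
// asset holds the file contents decoded as UTF-8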
Example 106
Source File: Base64Test.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License | 5 votes |
package com.salesforce.op.features.types import java.nio.charset.Charset import com.salesforce.op.test.TestCommon import org.apache.commons.io.IOUtils import org.junit.runner.RunWith import org.scalatest.PropSpec import org.scalatest.junit.JUnitRunner import org.scalatest.prop.PropertyChecks @RunWith(classOf[JUnitRunner]) class Base64Test extends PropSpec with PropertyChecks with TestCommon { property("handle empty") { forAll(None) { (v: Option[String]) => Base64(v).asBytes shouldBe None Base64(v).asString shouldBe None Base64(v).asInputStream shouldBe None } } property("can show byte contents") { forAll { (b: Array[Byte]) => val b64 = toBase64(b) (Base64(b64).asBytes map (_.toList)) shouldBe Some(b.toList) } } property("can show string contents") { forAll { (s: String) => val b64 = toBase64(s.getBytes) Base64(b64).asString shouldBe Some(s) } } property("produce a stream") { forAll { (s: String) => val b64 = toBase64(s.getBytes) Base64(b64).asInputStream.map(IOUtils.toString(_, Charset.defaultCharset())) shouldBe Some(s) } } property("produce a stream and map over it") { forAll { (s: String) => val b64 = toBase64(s.getBytes) Base64(b64).mapInputStream(IOUtils.toString(_, Charset.defaultCharset())) shouldBe Some(s) } } def toBase64(b: Array[Byte]): String = new String(java.util.Base64.getEncoder.encode(b)) }
Example 107
Source File: EventSerializer.scala From nexus-iam with Apache License 2.0 | 5 votes |
package ch.epfl.bluebrain.nexus.iam.io import java.nio.charset.Charset import akka.actor.ExtendedActorSystem import akka.serialization.SerializerWithStringManifest import ch.epfl.bluebrain.nexus.iam.acls.AclEvent import ch.epfl.bluebrain.nexus.iam.config.AppConfig.HttpConfig import ch.epfl.bluebrain.nexus.iam.config.Settings import ch.epfl.bluebrain.nexus.iam.permissions.PermissionsEvent import ch.epfl.bluebrain.nexus.iam.realms.RealmEvent import ch.epfl.bluebrain.nexus.iam.types.GrantType.Camel._ import ch.epfl.bluebrain.nexus.rdf.Iri.Url import ch.epfl.bluebrain.nexus.rdf.implicits._ import io.circe.generic.extras.Configuration import io.circe.generic.extras.semiauto._ import io.circe.parser._ import io.circe.syntax._ import io.circe.{Decoder, Encoder, Printer} class EventSerializer(system: ExtendedActorSystem) extends SerializerWithStringManifest { private val utf8 = Charset.forName("UTF-8") private val printer = Printer.noSpaces.copy(dropNullValues = true) private[io] implicit val http: HttpConfig = Settings(system).appConfig.http private[io] implicit val config: Configuration = Configuration.default.withDiscriminator("@type") private[io] implicit val urlEncoder: Encoder[Url] = Encoder.encodeString.contramap(_.asUri) private[io] implicit val urlDecoder: Decoder[Url] = Decoder.decodeString.emap(Url.apply) private[io] implicit val permissionEventEncoder: Encoder[PermissionsEvent] = deriveConfiguredEncoder[PermissionsEvent] private[io] implicit val permissionEventDecoder: Decoder[PermissionsEvent] = deriveConfiguredDecoder[PermissionsEvent] private[io] implicit val aclEventEncoder: Encoder[AclEvent] = deriveConfiguredEncoder[AclEvent] private[io] implicit val aclEventDecoder: Decoder[AclEvent] = deriveConfiguredDecoder[AclEvent] private[io] implicit val realmEventEncoder: Encoder[RealmEvent] = deriveConfiguredEncoder[RealmEvent] private[io] implicit val realmEventDecoder: Decoder[RealmEvent] = deriveConfiguredDecoder[RealmEvent] override val identifier: Int = 1225 override def manifest(o: AnyRef): String = o match { case _: PermissionsEvent => "permissions-event" case _: AclEvent => "acl-event" case _: RealmEvent => "realm-event" case other => throw new IllegalArgumentException( s"Cannot determine manifest for unknown type: '${other.getClass.getCanonicalName}'" ) } override def toBinary(o: AnyRef): Array[Byte] = o match { case ev: PermissionsEvent => ev.asJson.printWith(printer).getBytes(utf8) case ev: AclEvent => ev.asJson.printWith(printer).getBytes(utf8) case ev: RealmEvent => ev.asJson.printWith(printer).getBytes(utf8) case other => throw new IllegalArgumentException(s"Cannot serialize unknown type: '${other.getClass.getCanonicalName}'") } override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = manifest match { case "permissions-event" => val str = new String(bytes, utf8) decode[PermissionsEvent](str) .getOrElse(throw new IllegalArgumentException(s"Cannot deserialize value: '$str' to 'PermissionsEvent'")) case "acl-event" => val str = new String(bytes, utf8) decode[AclEvent](str) .getOrElse(throw new IllegalArgumentException(s"Cannot deserialize value: '$str' to 'AclEvent'")) case "realm-event" => val str = new String(bytes, utf8) decode[RealmEvent](str) .getOrElse(throw new IllegalArgumentException(s"Cannot deserialize value: '$str' to 'RealmEvent'")) case other => throw new IllegalArgumentException(s"Cannot deserialize type with unknown manifest: '$other'") } }
Example 108
Source File: BytesReader.scala From OUTDATED_ledger-wallet-android with MIT License | 5 votes |
package co.ledger.wallet.core.utils import java.math.BigInteger import java.nio.charset.Charset import org.bitcoinj.core.VarInt class BytesReader(val bytes: Array[Byte]) { private[this] var _offset = 0 def read(length: Int): Array[Byte] = { val offset = _offset seek(length) bytes.slice(offset, _offset) } def seek(length: Int): Unit = { if (length > available) { throw new IndexOutOfBoundsException(s"Invalid length ($length) is greater than available byte to read ($available)") } val offset = _offset _offset = offset + (if (length >= 0) length else available) } def readString(length: Int, charset: Charset = Charset.defaultCharset()): String = { new String(read(length), charset) } def readBigInteger(length: Int, signum: Int = 1): BigInteger = { new BigInteger(1, read(length)) } def readNextShort(): Short = readBigInteger(2).shortValue() def readNextInt(): Int = readBigInteger(4).intValue() def readNextLong(): Long = readBigInteger(8).longValue() def readLeBigInteger(length: Int, signum: Int = 1): BigInteger = { new BigInteger(1, read(length).reverse) } def readNextLeShort(): Int = readLeBigInteger(2).intValue() def readNextLeInt(): Int = readLeBigInteger(4).intValue() def readNextLeLong(): Long = readLeBigInteger(8).longValue() def readNextByte(): Byte = read(1)(0) def readNextVarInt(): VarInt = { val varInt = new VarInt(bytes, _offset) seek(varInt.getOriginalSizeInBytes) varInt } def available: Int = bytes.length - _offset def length: Int = bytes.length def apply(index: Int): Byte = bytes(index) }
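A walk through a little-endian record (the bytes are made up):

val r = new BytesReader(Array[Byte](0x2A, 0, 0, 0, 104, 105))
val n = r.readNextLeInt() // 42 (0x0000002A read little-endian)
val s = r.readString(2)   // "hi" in the default charset
assert(r.available == 0)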
Example 109
Source File: Util.scala From ratatool with Apache License 2.0 | 5 votes |
package com.spotify.ratatool.samplers.util import java.nio.charset.Charset import com.google.common.hash.{Funnel, HashCode, Hasher} import com.google.common.io.BaseEncoding trait SampleDistribution case object StratifiedDistribution extends SampleDistribution case object UniformDistribution extends SampleDistribution object SampleDistribution { def fromString(s: String): SampleDistribution = { if (s == "stratified") { StratifiedDistribution } else if (s == "uniform") { UniformDistribution } else { throw new IllegalArgumentException(s"Invalid distribution $s") } } } trait Determinism case object NonDeterministic extends Determinism case object Deterministic extends Determinism object Determinism { def fromSeq(l: Seq[_]): Determinism = { if (l == Seq()) { NonDeterministic } else { Deterministic } } } trait Precision case object Approximate extends Precision case object Exact extends Precision object Precision { def fromBoolean(exact: Boolean): Precision = { if (exact) { Exact } else { Approximate } } } trait ByteEncoding case object RawEncoding extends ByteEncoding case object HexEncoding extends ByteEncoding case object Base64Encoding extends ByteEncoding object ByteEncoding { def fromString(s: String): ByteEncoding = { if(s == "raw") { RawEncoding } else if(s == "hex") { HexEncoding } else if(s == "base64") { Base64Encoding } else { throw new IllegalArgumentException(s"Invalid byte encoding $s") } } }
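A parsing sketch for the string-keyed factories above:

val dist = SampleDistribution.fromString("stratified") // StratifiedDistribution
val enc = ByteEncoding.fromString("base64")            // Base64Encoding
val det = Determinism.fromSeq(Seq("user_id"))          // Deterministic, since the list is non-empty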
Example 110
Source File: HandshakeInitSpec.scala From asyncdb with Apache License 2.0 | 5 votes |
package io.asyncdb package netty package mysql package protocol package server import cats.effect._ import io.netty.buffer.Unpooled import java.nio.charset.Charset class HandshakeInitSpec extends Spec { val CS = Charset.defaultCharset() "HandshakeInit" - { "decode init packet" - { "MariaDB10" in { Decoder.decode[HandshakeInit](Bytes.MariaDB10, CS) should be('right) } "MySQL56" in { Decoder.decode[HandshakeInit](Bytes.MySQL56, CS) should be('right) } } } object Bytes { val MariaDB10Bytes = HexDump.decode( """0a352e352e352d31302e312e33312d4d617269614442002500000026712d277d614c3a00fff7e002003fa015000000000000000000007b2335234c376f4859687e61006d7973716c5f6e61746976655f70617373776f726400""" ) val MySQL56Bytes = HexDump.decode( """0a352e372e31332d6c6f6700160c0000533f5d042025172900ffff210200ffc1150000000000000000000027105a290c1f3a71111b5b68006d7973716c5f6e61746976655f70617373776f726400""" ) val MariaDB10 = Unpooled.wrappedBuffer(MariaDB10Bytes) val MySQL56 = Unpooled.wrappedBuffer(MySQL56Bytes) } }
Example 111
Source File: EncodeOutputs.scala From tapir with Apache License 2.0 | 5 votes |
package sttp.tapir.server.internal import java.nio.charset.Charset import sttp.model.{HeaderNames, StatusCode} import sttp.tapir.internal.{Params, ParamsAsAny, SplitParams} import sttp.tapir.{CodecFormat, EndpointIO, EndpointOutput, Mapping, RawBodyType, StreamingEndpointIO} import scala.util.Try class EncodeOutputs[B](encodeOutputBody: EncodeOutputBody[B]) { def apply(output: EndpointOutput[_], value: Params, ov: OutputValues[B]): OutputValues[B] = { output match { case s: EndpointOutput.Single[_] => applySingle(s, value, ov) case s: EndpointIO.Single[_] => applySingle(s, value, ov) case EndpointOutput.Pair(left, right, _, split) => applyPair(left, right, split, value, ov) case EndpointIO.Pair(left, right, _, split) => applyPair(left, right, split, value, ov) case EndpointOutput.Void() => throw new IllegalArgumentException("Cannot encode a void output!") } } private def applyPair( left: EndpointOutput[_], right: EndpointOutput[_], split: SplitParams, params: Params, ov: OutputValues[B] ): OutputValues[B] = { val (leftParams, rightParams) = split(params) apply(right, rightParams, apply(left, leftParams, ov)) } private def applySingle(output: EndpointOutput.Single[_], value: Params, ov: OutputValues[B]): OutputValues[B] = { def encoded[T]: T = output._mapping.asInstanceOf[Mapping[T, Any]].encode(value.asAny) output match { case EndpointIO.Empty(_, _) => ov case EndpointOutput.FixedStatusCode(sc, _, _) => ov.withStatusCode(sc) case EndpointIO.FixedHeader(header, _, _) => ov.withHeader(header.name -> header.value) case EndpointIO.Body(rawValueType, codec, _) => ov.withBody(encodeOutputBody.rawValueToBody(encoded, codec.format, rawValueType)) case EndpointIO.StreamBodyWrapper(StreamingEndpointIO.Body(codec, _, charset)) => ov.withBody(encodeOutputBody.streamValueToBody(encoded, codec.format, charset)) case EndpointIO.Header(name, _, _) => encoded[List[String]].foldLeft(ov) { case (ovv, headerValue) => ovv.withHeader((name, headerValue)) } case EndpointIO.Headers(_, _) => encoded[List[sttp.model.Header]].foldLeft(ov)((ov2, h) => ov2.withHeader((h.name, h.value))) case EndpointIO.MappedPair(wrapped, _) => apply(wrapped, ParamsAsAny(encoded), ov) case EndpointOutput.StatusCode(_, _, _) => ov.withStatusCode(encoded[StatusCode]) case EndpointOutput.OneOf(mappings, _) => val enc = encoded[Any] val mapping = mappings .find(mapping => mapping.appliesTo(enc)) .getOrElse(throw new IllegalArgumentException(s"No status code mapping for value: $enc, in output: $output")) apply(mapping.output, ParamsAsAny(enc), mapping.statusCode.map(ov.withStatusCode).getOrElse(ov)) case EndpointOutput.MappedPair(wrapped, _) => apply(wrapped, ParamsAsAny(encoded), ov) } } } case class OutputValues[B](body: Option[B], headers: Vector[(String, String)], statusCode: Option[StatusCode]) { def withBody(b: B): OutputValues[B] = { if (body.isDefined) { throw new IllegalArgumentException("Body is already defined") } copy(body = Some(b)) } def withHeader(h: (String, String)): OutputValues[B] = copy(headers = headers :+ h) def withStatusCode(sc: StatusCode): OutputValues[B] = copy(statusCode = Some(sc)) def contentLength: Option[Long] = headers .collectFirst { case (k, v) if HeaderNames.ContentLength.equalsIgnoreCase(k) => v } .flatMap(v => Try(v.toLong).toOption) } object OutputValues { def empty[B]: OutputValues[B] = OutputValues[B](None, Vector.empty, None) } trait EncodeOutputBody[B] { def rawValueToBody(v: Any, format: CodecFormat, bodyType: RawBodyType[_]): B def streamValueToBody(v: Any, format: CodecFormat, charset: 
Option[Charset]): B }
Example 112
Source File: FinatraRequestToRawBody.scala From tapir with Apache License 2.0 | 5 votes |
package sttp.tapir.server.finatra import java.io.ByteArrayInputStream import java.nio.ByteBuffer import java.nio.charset.Charset import com.twitter.finagle.http.Request import com.twitter.finatra.http.request.RequestUtils import com.twitter.io.Buf import com.twitter.util.Future import org.apache.commons.fileupload.FileItemHeaders import sttp.model.{Part, Header} import sttp.tapir.{RawPart, RawBodyType} import scala.collection.immutable.Seq import scala.collection.JavaConverters._ class FinatraRequestToRawBody(serverOptions: FinatraServerOptions) { def apply[R](bodyType: RawBodyType[R], body: Buf, charset: Option[Charset], request: Request): Future[R] = { def asByteArray: Array[Byte] = { val array = new Array[Byte](body.length) body.write(array, 0) array } def asByteBuffer: ByteBuffer = { val buffer = ByteBuffer.allocate(body.length) body.write(buffer) buffer.flip() buffer } bodyType match { case RawBodyType.StringBody(defaultCharset) => Future.value[R](new String(asByteArray, charset.getOrElse(defaultCharset))) case RawBodyType.ByteArrayBody => Future.value[R](asByteArray) case RawBodyType.ByteBufferBody => Future.value[R](asByteBuffer) case RawBodyType.InputStreamBody => Future.value[R](new ByteArrayInputStream(asByteArray)) case RawBodyType.FileBody => serverOptions.createFile(asByteArray) case m: RawBodyType.MultipartBody => multiPartRequestToRawBody(request, m) } } private def parseDispositionParams(headerValue: Option[String]): Map[String, String] = headerValue .map( _.split(";") .map(_.trim) .tail .map(_.split("=")) .map(array => array(0) -> array(1)) .toMap ) .getOrElse(Map.empty) private def getCharset(contentType: Option[String]): Option[Charset] = contentType.flatMap( _.split(";") .map(_.trim) .tail .map(_.split("=")) .map(array => array(0) -> array(1)) .toMap .get("charset") .map(Charset.forName) ) private def multiPartRequestToRawBody(request: Request, m: RawBodyType.MultipartBody): Future[Seq[RawPart]] = { def fileItemHeaders(headers: FileItemHeaders): Seq[Header] = { headers.getHeaderNames.asScala .flatMap { name => headers.getHeaders(name).asScala.map(name -> _) } .toSeq .filter(_._1.toLowerCase != "content-disposition") .map { case (k, v) => Header(k, v) } .toList } Future .collect( RequestUtils .multiParams(request) .flatMap { case (name, multiPartItem) => val dispositionParams: Map[String, String] = parseDispositionParams(Option(multiPartItem.headers.getHeader("content-disposition"))) val charset = getCharset(multiPartItem.contentType) for { partType <- m.partType(name) futureBody = apply(partType, Buf.ByteArray.Owned(multiPartItem.data), charset, request) } yield futureBody .map(body => Part(name, body, otherDispositionParams = dispositionParams - "name", headers = fileItemHeaders(multiPartItem.headers)) .asInstanceOf[RawPart] ) } .toSeq ) .map(_.toList) } }
Example 113
Source File: PlayRequestToRawBody.scala From tapir with Apache License 2.0 | 5 votes |
package sttp.tapir.server.play import java.io.ByteArrayInputStream import java.nio.charset.Charset import akka.stream.Materializer import akka.util.ByteString import play.api.mvc.{RawBuffer, Request} import play.core.parsers.Multipart import sttp.model.Part import sttp.tapir.{RawBodyType, RawPart} import sttp.tapir.internal._ import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future class PlayRequestToRawBody(serverOptions: PlayServerOptions) { def apply[R](bodyType: RawBodyType[R], charset: Option[Charset], request: Request[RawBuffer], body: ByteString)(implicit mat: Materializer ): Future[R] = { bodyType match { case RawBodyType.StringBody(defaultCharset) => Future(new String(body.toArray, charset.getOrElse(defaultCharset))) case RawBodyType.ByteArrayBody => Future(body.toArray) case RawBodyType.ByteBufferBody => Future(body.toByteBuffer) case RawBodyType.InputStreamBody => Future(body.toArray).map(new ByteArrayInputStream(_)) case RawBodyType.FileBody => Future(java.nio.file.Files.write(serverOptions.temporaryFileCreator.create().path, body.toArray)) .map(p => p.toFile) case m: RawBodyType.MultipartBody => multiPartRequestToRawBody(request, m, body) } } private def multiPartRequestToRawBody[R](request: Request[RawBuffer], m: RawBodyType.MultipartBody, body: ByteString)(implicit mat: Materializer ): Future[Seq[RawPart]] = { val bodyParser = serverOptions.playBodyParsers.multipartFormData( Multipart.handleFilePartAsTemporaryFile(serverOptions.temporaryFileCreator) ) bodyParser.apply(request).run(body).flatMap { case Left(_) => Future.failed(new IllegalArgumentException("Unable to parse multipart form data.")) // TODO case Right(value) => val dataParts = value.dataParts.map { case (key, value) => apply( m.partType(key).get, charset(m.partType(key).get), request, ByteString(value.flatMap(_.getBytes).toArray) ).map(body => Part(key, body).asInstanceOf[RawPart]) }.toSeq val fileParts = value.files.map(f => { apply( m.partType(f.key).get, charset(m.partType(f.key).get), request, ByteString.apply(java.nio.file.Files.readAllBytes(f.ref.path)) ).map(body => Part(f.key, body, Map(f.key -> f.dispositionType, Part.FileNameDispositionParam -> f.filename), Nil) .asInstanceOf[RawPart] ) }) Future.sequence(dataParts ++ fileParts) } } }
Example 114
Source File: TypedSparkeyReader.scala From scio with Apache License 2.0 | 5 votes |
package com.spotify.scio.extra.sparkey.instances import java.nio.charset.Charset import com.spotify.scio.util.Cache import com.spotify.sparkey.SparkeyReader import scala.jdk.CollectionConverters._ class TypedSparkeyReader[T]( val sparkey: SparkeyReader, val decoder: Array[Byte] => T, val cache: Cache[String, T] = Cache.noOp ) extends Map[String, T] { private def stringKeyToBytes(key: String): Array[Byte] = key.getBytes(Charset.defaultCharset()) private def loadValueFromSparkey(key: String): T = { val value = sparkey.getAsByteArray(stringKeyToBytes(key)) if (value == null) { null.asInstanceOf[T] } else { decoder(value) } } override def get(key: String): Option[T] = Option(cache.get(key, loadValueFromSparkey(key))) override def iterator: Iterator[(String, T)] = sparkey.iterator.asScala.map { e => val key = e.getKeyAsString val value = cache.get(key).getOrElse(decoder(e.getValue)) (key, value) } override def +[B1 >: T](kv: (String, B1)): Map[String, B1] = throw new NotImplementedError("Sparkey-backed map; operation not supported.") override def -(key: String): Map[String, T] = throw new NotImplementedError("Sparkey-backed map; operation not supported.") def close(): Unit = { sparkey.close() cache.invalidateAll() } }
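A usage sketch exposing a Sparkey store as a read-only Map[String, String]; the store path is illustrative, and Sparkey.open comes from the underlying sparkey-java library:

import java.io.File
import java.nio.charset.StandardCharsets
import com.spotify.sparkey.Sparkey

val reader = Sparkey.open(new File("/tmp/store.spi"))
val typed = new TypedSparkeyReader[String](reader, bytes => new String(bytes, StandardCharsets.UTF_8))
val hit: Option[String] = typed.get("some-key") // None when the key is absent
typed.close()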
Example 115
Source File: TypedSparkeyReader.scala From scio with Apache License 2.0 | 5 votes |
package com.spotify.scio.extra.sparkey.instances import java.nio.charset.Charset import com.spotify.scio.util.Cache import com.spotify.sparkey.SparkeyReader import scala.jdk.CollectionConverters._ class TypedSparkeyReader[T]( val sparkey: SparkeyReader, val decoder: Array[Byte] => T, val cache: Cache[String, T] = Cache.noOp ) extends Map[String, T] { private def stringKeyToBytes(key: String): Array[Byte] = key.getBytes(Charset.defaultCharset()) private def loadValueFromSparkey(key: String): T = { val value = sparkey.getAsByteArray(stringKeyToBytes(key)) if (value == null) { null.asInstanceOf[T] } else { decoder(value) } } override def get(key: String): Option[T] = Option(cache.get(key, loadValueFromSparkey(key))) override def iterator: Iterator[(String, T)] = sparkey.iterator.asScala.map { e => val key = e.getKeyAsString val value = cache.get(key).getOrElse(decoder(e.getValue)) (key, value) } override def updated[B1 >: T](k: String, v: B1): Map[String, B1] = throw new NotImplementedError("Sparkey-backed map; operation not supported.") override def removed(key: String): Map[String, T] = throw new NotImplementedError("Sparkey-backed map; operation not supported.") def close(): Unit = { sparkey.close() cache.invalidateAll() } }
Example 116
Source File: CodeGeneratorTest.scala From MoVE with Mozilla Public License 2.0 | 5 votes |
package de.thm.move.models import java.io.PrintWriter import java.net.URI import java.nio.charset.Charset import java.nio.file.{Paths, Files} import java.util.Base64 import javafx.scene.Node import javafx.scene.paint.{Paint, Color} import javafx.scene.shape.{LineTo, MoveTo} import javafx.scene.text.TextAlignment import de.thm.move.MoveSpec import de.thm.move.models.ModelicaCodeGenerator.FormatSrc import de.thm.move.models.ModelicaCodeGenerator.FormatSrc.FormatSrc import de.thm.move.types._ import de.thm.move.util.ResourceUtils import de.thm.move.util.GeometryUtils import de.thm.move.views.shapes._ class CodeGeneratorTest extends MoveSpec { val dummyURL = Paths.get(System.getProperty("user.home")).toUri private def eqTest(toTest:String,expected:String): Unit = { if(!toTest.contains(expected)) { println(toTest) println("Expected: "+expected) } assert(toTest.contains(expected), s"Expected [$toTest] containing [$expected]") } "ModelicaCodeGenerator" should "generate Rectangles" in { val generator = new ModelicaCodeGenerator(FormatSrc.Pretty, 1, 500,500) val rect = new ResizableRectangle((0,0), 100,100) rect.colorizeShape(Color.BLACK, Color.BLACK) rect.setRotate(90.0) val str = generator.generateShape(rect, "test", dummyURL)(1) eqTest(str, "origin = {50,450}") eqTest(str, "extent = {{-50,50}, {50,-50}}") val generator2 = new ModelicaCodeGenerator(FormatSrc.Pretty, 4, 500,500) val str2 = generator2.generateShape(rect, "test", dummyURL)(1) eqTest(str2, "origin = {12,112}") eqTest(str2, "extent = {{-12,12}, {12,-12}}") } it should "generate Circles" in { val generator = new ModelicaCodeGenerator(FormatSrc.Pretty, 1, 500,500) val circle = new ResizableCircle((100,100), 50,50) circle.colorizeShape(Color.BLACK, Color.BLACK) circle.setRotate(90.0) val str = generator.generateShape(circle, "test", dummyURL)(1) eqTest(str, "origin = {100,400}") eqTest(str, "extent = {{-50,50}, {50,-50}}") } }
Example 117
Source File: ValueConfig.scala From MoVE with Mozilla Public License 2.0 | 5 votes |
package de.thm.move.config import java.net.URL import java.nio.charset.Charset import java.nio.file.{Files, Paths} import de.thm.move.Global import de.thm.move.util.converters.Convertable import scala.collection._ import scala.io.Source class ValueConfig(url:URL) { val values = Source.fromURL(url, Global.encoding.name).getLines().to[mutable.ArrayBuffer] def getValues:List[String] = values.filter(!_.startsWith("##")).toList def getConvertedValues[A](implicit ev:Convertable[String, A]):List[A] = getValues.map(ev.convert) def setUniqueValue(v:String) = if(!values.contains(v)) setValue(v) def setValue(v:String) = values += v def removeValue(v:String):Unit = values.remove(values.indexOf(v)) def saveConfig(): Unit = { val writer = Files.newBufferedWriter(Paths.get(url.toURI), Charset.forName("UTF-8")) writer.write(values.mkString("\n")) writer.close() } }
Example 118
Source File: Global.scala From MoVE with Mozilla Public License 2.0 | 5 votes |
package de.thm.move import java.net.URL import java.nio.charset.Charset import java.nio.file.{Files, Path, Paths} import java.util.{Locale, ResourceBundle} import de.thm.move.config.{Config, ConfigLoader} import de.thm.move.history.History import de.thm.move.shortcuts.ShortCutHandler import de.thm.move.util.CustomResourceBundle object Global { private val configDirectoryName = ".move" private val configDirPath = Paths.get(System.getProperty("user.home"), configDirectoryName) def zippedUndo[A, B](xs:List[A])( fn: A => B)( exec: A => Unit, undo: A => B => Unit): Unit = { val zipped = xs zip xs.map(fn) history.execute { xs.foreach(exec) } { zipped.foreach { case (a,b) => undo(a)(b) } } } }
Example 119
Source File: UrlencodedData.scala From tapir with Apache License 2.0 | 5 votes |
package sttp.tapir.internal import java.net.{URLDecoder, URLEncoder} import java.nio.charset.Charset private[tapir] object UrlencodedData { def decode(s: String, charset: Charset): Seq[(String, String)] = { s.split("&") .toList .flatMap(kv => kv.split("=", 2) match { case Array(k, v) => Some((URLDecoder.decode(k, charset.toString), URLDecoder.decode(v, charset.toString))) case _ => None } ) } def encode(s: Seq[(String, String)], charset: Charset): String = { s.map { case (k, v) => s"${URLEncoder.encode(k, charset.toString)}=${URLEncoder.encode(v, charset.toString)}" } .mkString("&") } }
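`UrlencodedData` is `private[tapir]`, so the round-trip below is sketched with the same `URLEncoder`/`URLDecoder` calls it delegates to (the sample pairs are illustrative):

import java.net.{URLDecoder, URLEncoder}
import java.nio.charset.StandardCharsets

val cs = StandardCharsets.UTF_8.toString
val pairs = Seq("q" -> "scala & charsets", "lang" -> "en")
// encode: percent-encode both sides of each pair, join with '&'
val encoded = pairs.map { case (k, v) => s"${URLEncoder.encode(k, cs)}=${URLEncoder.encode(v, cs)}" }.mkString("&")
// "q=scala+%26+charsets&lang=en"
// decode: split on '&', then on the first '=' only, silently dropping malformed fragments
val decoded = encoded.split("&").toList.flatMap { kv =>
  kv.split("=", 2) match {
    case Array(k, v) => Some(URLDecoder.decode(k, cs) -> URLDecoder.decode(v, cs))
    case _ => None
  }
}
assert(decoded == pairs.toList)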
Example 120
Source File: FrameCodec.scala From asyncdb with Apache License 2.0 | 5 votes |
package io.asyncdb package netty package mysql import cats.effect._ import cats.effect.concurrent._ import cats.effect.syntax.all._ import cats.instances.option._ import cats.syntax.all._ import io.asyncdb.netty.mysql.protocol.server._ import io.netty.buffer._ import io.netty.channel._ import io.netty.handler.codec._ import io.netty.util.AttributeKey import java.nio.charset.Charset import protocol.client._ import protocol.server._ class FrameEncoder[F[_]](config: MySQLSocketConfig) extends ChannelOutboundHandlerAdapter { override def write( ctx: ChannelHandlerContext, msg: AnyRef, p: ChannelPromise ) = { val charset = CharsetMap.of(config.charset) val packets = msg match { case m: HandshakeResponse => val buf = ctx.alloc().buffer(1024) PacketsEncoder.encode(m, buf, charset) } val wrapped = Unpooled.wrappedBuffer(packets: _*) ctx.write(wrapped, p) ctx.flush() } } class FrameDecoder[F[_]]( config: MySQLSocketConfig, ctxRef: Ref[F, ChannelContext[F]], msgRef: MsgRef[F] )(implicit F: ConcurrentEffect[F]) extends ByteToMessageDecoder { private val FSM = new StateMachine[F](config) private val charsetKey: AttributeKey[Short] = AttributeKey.valueOf("Charset") override def decode( ctx: ChannelHandlerContext, in: ByteBuf, out: java.util.List[AnyRef] ) = { if (PacketDecoder.isReady(in)) { ctxRef.access.flatMap { case (old, updateF) => FSM.transition(in).run(old).flatMap { case (nc, ChannelState.Result(o, e)) => val fireOutgoing = o.traverse { om => F.delay { ctx.channel().write(om) } } val enqueueEmit = e.traverse { em => msgRef.put(em) } fireOutgoing *> enqueueEmit *> updateF(nc) } }.toIO.unsafeRunSync() } } }
Example 121
Source File: State.scala From asyncdb with Apache License 2.0 | 5 votes |
package io.asyncdb package netty package mysql import cats.MonadError import cats.data.StateT import cats.effect.concurrent._ import cats.syntax.all._ import cats.instances.either._ import io.netty.buffer._ import io.netty.channel._ import java.nio.charset.Charset import protocol.client._ import protocol.server._ sealed trait ChannelState object ChannelState { sealed trait Handshake extends ChannelState object Handshake { object WaitHandshakeInit extends Handshake object WaitAuthResult extends Handshake } object ReadForCommand extends ChannelState def transition: Transition = { buf => StateT { ctx => ctx.state match { // Read the handshake init, if not receive it, then send server case ChannelState.Handshake.WaitHandshakeInit => PacketDecoder[HandshakeInit] .decode(buf, Charset.defaultCharset()) .liftTo[F] .flatMap { init => val out = HandshakeResponse(init, config) val r = ChannelState.Result(Some(out), None) val nc = ctx.copy(state = ChannelState.Handshake.WaitAuthResult) nc.serverCharset.complete(init.charset).as(nc -> r) } // Read auth result, emit it and make state [[ChannelState.ReadForCommand]] case ChannelState.Handshake.WaitAuthResult => for { cs <- ctx.serverCharset.get m <- PacketDecoder[OrErr[Ok]].decode(buf, cs).liftTo[F] } yield { val nc = ctx.copy( state = ChannelState.ReadForCommand ) val outgoing = None val emit = Some(m) (nc, ChannelState.Result(outgoing, emit)) } } } } }
Example 122
Source File: Auth.scala From asyncdb with Apache License 2.0 | 5 votes |
package io.asyncdb package netty package mysql import java.security.MessageDigest import java.nio.charset.Charset import io.netty.buffer._ object Auth { def nativePassword( seed: Array[Byte], password: String, charset: Charset ): Array[Byte] = { val md = MessageDigest.getInstance("SHA-1") val hash1 = md.digest(password.getBytes(charset)) md.reset() val hash2 = md.digest(hash1) md.reset() md.update(seed) md.update(hash2) val digest = md.digest() (0 until digest.length) foreach { i => digest(i) = (digest(i) ^ hash1(i)).toByte } digest } }
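`nativePassword` implements the mysql_native_password scramble: SHA1(password) XOR SHA1(seed ++ SHA1(SHA1(password))). A small sketch checking that identity against the method above (the seed bytes are made up, and `Auth` is assumed to be in scope):

import java.nio.charset.StandardCharsets
import java.security.MessageDigest

def sha1(chunks: Array[Byte]*): Array[Byte] = {
  val md = MessageDigest.getInstance("SHA-1")
  chunks.foreach(md.update)
  md.digest()
}
val seed = Array.fill[Byte](20)(1) // stand-in for the 20-byte nonce sent by the server
val pwd = "secret".getBytes(StandardCharsets.UTF_8)
val expected = sha1(pwd).zip(sha1(seed, sha1(sha1(pwd)))).map { case (a, b) => (a ^ b).toByte }
assert(expected.sameElements(Auth.nativePassword(seed, "secret", StandardCharsets.UTF_8)))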
Example 123
Source File: MySQLSocket.scala From asyncdb with Apache License 2.0 | 5 votes |
package io.asyncdb package netty package mysql import cats.syntax.all._ import cats.effect._ import cats.effect.concurrent._ import cats.data.NonEmptyList import io.netty.bootstrap.Bootstrap import io.netty.channel.{Channel, ChannelInitializer} import java.nio.charset.Charset import protocol.client._ import protocol.server._ case class MySQLSocketConfig( bootstrap: Bootstrap, username: String, password: Option[String], database: Option[String], charset: Short, authMethod: Option[String] ) extends NettySocketConfig class MySQLSocket[F[_]]( config: MySQLSocketConfig, channelHolder: Deferred[F, Either[Throwable, Channel]], ref: MsgRef[F] )(implicit F: Concurrent[F]) extends NettySocket[F, Message](config, channelHolder) { def connect = { open.flatMap(_.read).as(this) } def disconnect = { close.void } def write(n: Message) = { channel.flatMap(_.write(n).to[F]).void } def read = ref.take.flatMap { case OrErr(value) => F.fromEither(value) case v => F.pure(v) } } object MySQLSocket { def apply[F[_]: ConcurrentEffect](config: MySQLSocketConfig) = { for { msgRef <- MVar[F].empty[Message] clientCS <- Deferred[F, Charset] initCtx = ChannelContext( ChannelState.Handshake.WaitHandshakeInit, clientCS ) ctxRef <- Ref[F].of(initCtx) decoder = new FrameDecoder[F](config, ctxRef, msgRef) encoder = new FrameEncoder(config) initHandler = new ChannelInitializer[Channel] { override def initChannel(channel: Channel): Unit = { channel .pipeline() .addLast("MySQLFrameDecoder", decoder) .addLast("MySQLFrameEncoder", encoder) } } _ = config.bootstrap.handler(initHandler) channel <- Deferred[F, Either[Throwable, Channel]] } yield new MySQLSocket[F](config, channel, msgRef) } }
Example 124
Source File: package.scala From asyncdb with Apache License 2.0 | 5 votes |
package io.asyncdb package netty package mysql import io.netty.buffer.ByteBuf import java.nio.charset.Charset import shapeless._ package object protocol { implicit class ValueEncoderOps[A](val encoder: Encoder[A]) extends AnyVal { def ::[H](pre: Encoder[H]): Encoder[H :: A :: HNil] = new Encoder[H :: A :: HNil] { def encode(v: H :: A :: HNil, buf: ByteBuf, charset: Charset) = { val hv = v.head val av = v.tail.head pre.encode(hv, buf, charset) encoder.encode(av, buf, charset) } } } implicit class HListEncoderOps[L <: HList](val encoder: Encoder[L]) extends AnyVal { def ::[H](pre: Encoder[H]): Encoder[H :: L] = new Encoder[H :: L] { def encode(v: H :: L, buf: ByteBuf, charset: Charset) = { pre.encode(v.head, buf, charset) encoder.encode(v.tail, buf, charset) } } def as[A](implicit gen: Generic.Aux[A, L]): Encoder[A] = encoder.contramap(l => gen.to(l)) } implicit class ValueDecoderOps[A](val Decoder: Decoder[A]) extends AnyVal { def ::[H](pre: Decoder[H]): Decoder[H :: A :: HNil] = new Decoder[H :: A :: HNil] { def decode(buf: ByteBuf, charset: Charset) = { val h = pre.decode(buf, charset) val t = Decoder.decode(buf, charset) h :: t :: HNil } } } implicit class HListDecoderOps[L <: HList](val decoder: Decoder[L]) extends AnyVal { def ::[H](pre: Decoder[H]): Decoder[H :: L] = new Decoder[H :: L] { def decode(buf: ByteBuf, charset: Charset) = { val h = pre.decode(buf, charset) val l = decoder.decode(buf, charset) h :: l } } def as[A](implicit gen: Generic.Aux[A, L]): Decoder[A] = decoder.map(l => gen.from(l)) } }
Example 125
Source File: HandshakeInit.scala From asyncdb with Apache License 2.0 | 5 votes |
package io.asyncdb package netty package mysql package protocol package server import io.netty.buffer._ import java.nio.charset.Charset import cats.syntax.all._ case class HandshakeInit( protocol: Int, version: String, connectionId: Int, authPluginData: Array[Byte], charset: Charset, cap: Int, authenticationMethod: String ) extends Message case class ExtraHandshakeData( characterSet: Short, statusFlags: Int, capabilityFlagUpper: Int, authPluginDataLen: Short, reserved: Array[Byte], authPluginDataPart2: Array[Byte], authenticationMethod: Array[Byte] ) case class BasicHandshakeData( protocol: Byte, version: Array[Byte], connectionId: Int, authPluginDataPart1: Array[Byte], filter: Byte, capabilityFlagLower: Int ) object HandshakeInit { private def apply(b: BasicHandshakeData, e: ExtraHandshakeData) = { val c = CharsetMap.of(e.characterSet) val r = new HandshakeInit( protocol = b.protocol, version = new String(b.version, c), connectionId = b.connectionId, authPluginData = Array.concat(b.authPluginDataPart1,e.authPluginDataPart2), charset = c, cap = b.capabilityFlagLower & e.capabilityFlagUpper, authenticationMethod = new String(e.authenticationMethod, c) ) r } import Decoder._ implicit val handshakeInitDecoder: Decoder[HandshakeInit] = { val basic = (int1 :: ntBytes :: intL4 :: bytes(8) :: int1 :: intL2) .as[BasicHandshakeData] val extra = (uint1 :: intL2 :: intL2 :: uint1.flatMap { pdl => val apd2Len = math.max(13, pdl - 8) val apd2 = if(apd2Len > 0) { // Mysql documentation says [[authPluginDataPart2]] was length-encoded string, but actually is null terminated string ntBytes } else Decoder.pure(Array.empty[Byte]) Decoder.pure(pdl) :: bytes(10) :: apd2 :: ntBytes }).as[ExtraHandshakeData] for { b <- basic e <- extra } yield apply(b, e) } }
Example 126
Source File: OrError.scala From asyncdb with Apache License 2.0 | 5 votes |
package io.asyncdb package netty package mysql package protocol package server import io.netty.buffer.ByteBuf import java.nio.charset.Charset case class OrErr[R <: Message](value: Either[Err, R]) extends Message object OrErr { implicit def resultOrErr[R <: Message]( implicit ad: Decoder[R] ): Decoder[OrErr[R]] = new Decoder[OrErr[R]] { def decode(buf: ByteBuf, charset: Charset) = { val head = buf.getByte(buf.readerIndex()) if (head.toByte == -1) { OrErr(Left(Err.errDecoder.decode(buf, charset))) } else { OrErr(Right(ad.decode(buf, charset))) } } } }
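The decoder above branches on the first byte of the packet: MySQL marks an error packet with a 0xFF header, which reads as -1 when taken as a signed byte, and `getByte` peeks without consuming. A tiny illustration of that check (the packet bytes are made up):

import io.netty.buffer.Unpooled

val packet = Unpooled.wrappedBuffer(Array[Byte](0xFF.toByte, 0x15, 0x04))
val isErr = packet.getByte(packet.readerIndex()) == -1 // true
// getByte does not advance readerIndex, so the Err decoder still sees the full packet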
Example 127
Source File: HandshakeResponse.scala From asyncdb with Apache License 2.0 | 5 votes |
package io.asyncdb package netty package mysql package protocol package client import io.netty.buffer.ByteBuf import java.nio.charset.Charset import shapeless._ case class HandshakeResponse( clientFlag: Int, maxPacketSize: Int, charset: Short, filter: Array[Byte], username: String, password: Array[Byte], database: Option[String], authMethod: String ) extends Message object HandshakeResponse { def apply(init: server.HandshakeInit, config: MySQLSocketConfig) = { val cap = { val base = Cap.baseCap val withDatabase = config.database.fold(base)(_ => base + Cap.ConnectWithDB) withDatabase } val passBytes = config.password.fold(Array.empty[Byte])(p => Auth.nativePassword(init.authPluginData, p, CharsetMap.of(config.charset))) new HandshakeResponse( clientFlag = cap.mask, maxPacketSize = Packet.MaxSize, charset = config.charset, database = config.database, username = config.username, password = passBytes, authMethod = config.authMethod.getOrElse(init.authenticationMethod), filter = Array.fill(23)(0.toByte) ) } import Encoder._ implicit val handshakeResponseEncoder: Encoder[HandshakeResponse] = Encoder[HandshakeResponse] { data => (intL4 :: intL4 :: uint1 :: bytes :: ntText :: lenencBytes :: ntText.? :: ntText).as[HandshakeResponse] } }
Example 128
Source File: FlywayConfig.scala From scala-server-toolkit with MIT License | 5 votes |
package com.avast.sst.flyway import java.nio.charset.{Charset, StandardCharsets} import org.flywaydb.core.api.MigrationVersion final case class FlywayConfig( baselineOnMigrate: Boolean = false, baselineVersion: Option[MigrationVersion] = None, targetVersion: Option[MigrationVersion] = None, baselineDescription: Option[String] = None, cleanDisabled: Boolean = false, cleanOnValidationError: Boolean = false, connectRetries: Int = 0, encoding: Charset = StandardCharsets.UTF_8, group: Boolean = false, ignoreFutureMigrations: Boolean = true, ignoreIgnoredMigrations: Boolean = false, ignoreMissingMigrations: Boolean = false, ignorePendingMigrations: Boolean = false, installedBy: Option[String] = None, mixed: Boolean = false, locations: List[String] = List.empty, outOfOrder: Boolean = false, validateOnMigrate: Boolean = true, placeholderReplacement: Boolean = true, placeholders: Map[String, String] = Map.empty )
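Since every field has a default, callers override only what differs; a usage sketch (the version and locations are illustrative):

import java.nio.charset.StandardCharsets
import org.flywaydb.core.api.MigrationVersion

val config = FlywayConfig(
  encoding = StandardCharsets.ISO_8859_1,
  locations = List("classpath:db/migration"),
  baselineOnMigrate = true,
  baselineVersion = Some(MigrationVersion.fromVersion("1"))
)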
Example 129
Source File: utf8.scala From libisabelle with Apache License 2.0 | 5 votes |
package isabelle import java.nio.charset.Charset import scala.io.Codec object UTF8 { // see also https://en.wikipedia.org/wiki/UTF-8#Description // overlong encodings enable byte-stuffing of low-ASCII def decode_permissive(text: CharSequence): String = { val buf = new java.lang.StringBuilder(text.length) var code = -1 var rest = 0 def flush() { if (code != -1) { if (rest == 0 && Character.isValidCodePoint(code)) buf.appendCodePoint(code) else buf.append('\uFFFD') code = -1 rest = 0 } } def init(x: Int, n: Int) { flush() code = x rest = n } def push(x: Int) { if (rest <= 0) init(x, -1) else { code <<= 6 code += x rest -= 1 } } for (i <- 0 until text.length) { val c = text.charAt(i) if (c < 128) { flush(); buf.append(c) } else if ((c & 0xC0) == 0x80) push(c & 0x3F) else if ((c & 0xE0) == 0xC0) init(c & 0x1F, 1) else if ((c & 0xF0) == 0xE0) init(c & 0x0F, 2) else if ((c & 0xF8) == 0xF0) init(c & 0x07, 3) } flush() buf.toString } private class Decode_Chars(decode: String => String, buffer: Array[Byte], start: Int, end: Int) extends CharSequence { def length: Int = end - start def charAt(i: Int): Char = (buffer(start + i).asInstanceOf[Int] & 0xFF).asInstanceOf[Char] def subSequence(i: Int, j: Int): CharSequence = new Decode_Chars(decode, buffer, start + i, start + j) // toString with adhoc decoding: abuse of CharSequence interface override def toString: String = decode(decode_permissive(this)) } def decode_chars(decode: String => String, buffer: Array[Byte], start: Int, end: Int): CharSequence = { require(0 <= start && start <= end && end <= buffer.length) new Decode_Chars(decode, buffer, start, end) } }
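`decode_permissive` consumes a CharSequence whose chars carry raw byte values 0..255, which is exactly the view `decode_chars` provides; malformed input degrades to U+FFFD instead of throwing. A sketch (the sample bytes are illustrative):

import java.nio.charset.StandardCharsets

val ok = "héllo".getBytes(StandardCharsets.UTF_8)
val bad = ok.dropRight(1) :+ 0xC3.toByte // ends mid multi-byte sequence
println(UTF8.decode_chars(identity, ok, 0, ok.length))   // héllo
println(UTF8.decode_chars(identity, bad, 0, bad.length)) // héll followed by U+FFFD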
Example 130
Source File: isabelle_charset.scala From libisabelle with Apache License 2.0 | 5 votes |
package isabelle import java.nio.Buffer import java.nio.{ByteBuffer, CharBuffer} import java.nio.charset.{Charset, CharsetDecoder, CharsetEncoder, CoderResult} import java.nio.charset.spi.CharsetProvider object Isabelle_Charset { val name: String = "UTF-8-Isabelle-test" // FIXME lazy val charset: Charset = new Isabelle_Charset } class Isabelle_Charset extends Charset(Isabelle_Charset.name, null) { override def contains(cs: Charset): Boolean = cs.name.equalsIgnoreCase(UTF8.charset_name) || UTF8.charset.contains(cs) override def newDecoder(): CharsetDecoder = UTF8.charset.newDecoder override def newEncoder(): CharsetEncoder = UTF8.charset.newEncoder } class Isabelle_Charset_Provider extends CharsetProvider { override def charsetForName(name: String): Charset = { // FIXME inactive // if (name.equalsIgnoreCase(Isabelle_Charset.name)) Isabelle_Charset.charset // else null null } override def charsets(): java.util.Iterator[Charset] = { import scala.collection.JavaConversions._ // FIXME inactive // Iterator(Isabelle_Charset.charset) Iterator() } }
Example 135
Source File: FutureCodec.scala From aws-lambda-scala with MIT License | 5 votes |
package io.github.mkotsur.aws.codecs import java.io.ByteArrayOutputStream import java.nio.charset.Charset import io.circe.Encoder import io.github.mkotsur.aws.handler.CanEncode import io.github.mkotsur.aws.proxy.ProxyResponse import io.circe.generic.auto._ import io.circe.syntax._ import cats.syntax.either.catsSyntaxEither import scala.concurrent.{Await, Future} import scala.concurrent.duration._ import scala.language.postfixOps import scala.util.{Failure, Success, Try} private[aws] trait FutureCodec { implicit def canEncodeFuture[I: Encoder](implicit canEncode: Encoder[I]) = CanEncode.instance[Future[I]]((os, responseEither, ctx) => { (for { response <- responseEither.toTry futureResult <- Try(Await.result(response, ctx.getRemainingTimeInMillis millis)) json <- Try(canEncode(futureResult).noSpaces.getBytes) _ <- Try(os.write(json)) } yield { () }) match { case Success(v) => Right(v) case Failure(e) => Left(e) } }) implicit def canEncodeProxyResponse[T](implicit canEncode: CanEncode[T]) = CanEncode.instance[ProxyResponse[T]]( (output, proxyResponseEither, ctx) => { def writeBody(bodyOption: Option[T]): Either[Throwable, Option[String]] = bodyOption match { case None => Right(None) case Some(body) => val os = new ByteArrayOutputStream() val result = canEncode.writeStream(os, Right(body), ctx) os.close() result.map(_ => Some(os.toString())) } val proxyResposeOrError = for { proxyResponse <- proxyResponseEither bodyOption <- writeBody(proxyResponse.body) } yield ProxyResponse[String]( proxyResponse.statusCode, proxyResponse.headers, bodyOption ) val response = proxyResposeOrError match { case Right(proxyRespose) => proxyRespose case Left(e) => ProxyResponse[String]( 500, Some(Map("Content-Type" -> s"text/plain; charset=${Charset.defaultCharset().name()}")), Some(e.getMessage) ) } output.write(response.asJson.noSpaces.getBytes) Right(()) } ) }
Example 136
Source File: FileUtilsSpec.scala From warp-core with MIT License | 5 votes |
package com.workday.warp.common.utils import java.io.InputStream import java.nio.charset.Charset import java.util.zip.ZipException import com.workday.warp.common.spec.WarpJUnitSpec import com.workday.warp.junit.UnitTest import org.apache.commons.io.IOUtils class FileUtilsSpec extends WarpJUnitSpec { @UnitTest def testNonExistentZipEntryStreamCannotBeRetrievedFromZipFile(): Unit = { val zipEntryName: String = "DOES_NOT_EXIST" val filePath: String = getClass.getResource("/simpleZip.zip").getPath val thrown: Throwable = intercept[NullPointerException] { FileUtils.getStreamToLogFileInZip(filePath, "DOES_NOT_EXIST") } thrown.getMessage should be (s"Zip entry $zipEntryName not found in $filePath") } }
Example 137
Source File: JsonMQDeserializer.scala From seahorse-workflow-executor with Apache License 2.0 | 5 votes |
package io.deepsense.workflowexecutor.communication.mq.json import java.nio.charset.Charset import spray.json._ import io.deepsense.workflowexecutor.communication.mq.MQDeserializer import io.deepsense.workflowexecutor.communication.mq.json.Constants.JsonKeys._ class JsonMQDeserializer( jsonDeserializers: Seq[JsonMessageDeserializer], parent: Option[JsonMQDeserializer] = None) extends MQDeserializer with JsonMessageDeserializer { private val combinedJsonDeserializers = { jsonDeserializers.tail.foldLeft(jsonDeserializers.head.deserialize) { case (acc, deserializer) => acc.orElse(deserializer.deserialize) } } override val deserialize: PartialFunction[(String, JsObject), Any] = { parent match { case Some(p) => combinedJsonDeserializers.orElse(p.deserialize) case None => combinedJsonDeserializers } } override def deserializeMessage(data: Array[Byte]): Any = { val json = new String(data, Global.charset).parseJson val jsObject = json.asJsObject val fields = jsObject.fields import spray.json.DefaultJsonProtocol._ val messageType = getField(fields, messageTypeKey).convertTo[String] val body = getField(fields, messageBodyKey).asJsObject() deserialize(messageType, body) } def orElse(next: JsonMQDeserializer): JsonMQDeserializer = new JsonMQDeserializer(jsonDeserializers, Some(next)) private def getField(fields: Map[String, JsValue], fieldName: String): JsValue = { try { fields(fieldName) } catch { case e: NoSuchElementException => throw new DeserializationException(s"Missing field: $fieldName", e) } } }
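On the wire, every message is a JSON envelope with `messageType` and `messageBody` fields (the key names are confirmed by the serializer spec further below); a sketch of a payload `deserializeMessage` would accept, with an illustrative body:

val payload = """{"messageType": "heartbeat", "messageBody": {"workflowId": "1234-5678"}}"""
val bytes = payload.getBytes(Global.charset)
// deserializer.deserializeMessage(bytes) parses the envelope, then routes the body
// to whichever JsonMessageDeserializer in the chain handles "heartbeat"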
Example 138
Source File: Global.scala From seahorse-workflow-executor with Apache License 2.0 | 5 votes |
package io.deepsense.workflowexecutor.communication.mq.json import java.nio.charset.Charset import io.deepsense.deeplang.CatalogRecorder import io.deepsense.models.json.graph.GraphJsonProtocol.GraphReader import io.deepsense.models.json.workflow.InferredStateJsonProtocol import io.deepsense.models.json.workflow.InferredStateJsonProtocol._ import io.deepsense.models.json.workflow.ExecutionReportJsonProtocol._ import io.deepsense.models.workflows.{ExecutionReport, InferredState} import io.deepsense.workflowexecutor.communication.message.global._ import io.deepsense.workflowexecutor.communication.message.global.HeartbeatJsonProtocol._ import io.deepsense.workflowexecutor.communication.message.global.PoisonPillJsonProtocol._ import io.deepsense.workflowexecutor.communication.message.global.ReadyJsonProtocol._ import io.deepsense.workflowexecutor.communication.message.global.LaunchJsonProtocol._ object Global { val charset = Charset.forName("UTF-8") val dOperationsCatalog = CatalogRecorder.resourcesCatalogRecorder.catalogs.dOperationsCatalog val graphReader = new GraphReader(dOperationsCatalog) val inferredStateJsonProtocol = InferredStateJsonProtocol(graphReader) import inferredStateJsonProtocol._ import Constants.MessagesTypes._ object HeartbeatDeserializer extends DefaultJsonMessageDeserializer[Heartbeat](heartbeat) object HeartbeatSerializer extends DefaultJsonMessageSerializer[Heartbeat](heartbeat) object PoisonPillDeserializer extends DefaultJsonMessageDeserializer[PoisonPill](poisonPill) object PoisonPillSerializer extends DefaultJsonMessageSerializer[PoisonPill](poisonPill) object ReadyDeserializer extends DefaultJsonMessageDeserializer[Ready](ready) object ReadySerializer extends DefaultJsonMessageSerializer[Ready](ready) object LaunchDeserializer extends DefaultJsonMessageDeserializer[Launch](launch) object LaunchSerializer extends DefaultJsonMessageSerializer[Launch](launch) object ExecutionReportSerializer extends DefaultJsonMessageSerializer[ExecutionReport](executionReport) object ExecutionReportDeserializer extends DefaultJsonMessageDeserializer[ExecutionReport](executionReport) object InferredStateSerializer extends DefaultJsonMessageSerializer[InferredState](inferredState) object InferredStateDeserializer extends DefaultJsonMessageDeserializer[InferredState](inferredState) object GlobalMQSerializer extends JsonMQSerializer( Seq(HeartbeatSerializer, PoisonPillSerializer, ReadySerializer, LaunchSerializer, ExecutionReportSerializer, InferredStateSerializer )) object GlobalMQDeserializer extends JsonMQDeserializer( Seq(HeartbeatDeserializer, PoisonPillDeserializer, ReadyDeserializer, LaunchDeserializer, ExecutionReportDeserializer, InferredStateDeserializer )) }
Example 139
Source File: JsonMQSerializer.scala From seahorse-workflow-executor with Apache License 2.0 | 5 votes |
package io.deepsense.workflowexecutor.communication.mq.json import java.nio.charset.Charset import spray.json.JsObject import io.deepsense.workflowexecutor.communication.mq.MQSerializer class JsonMQSerializer( jsonSerializers: Seq[JsonMessageSerializer], parent: Option[JsonMQSerializer] = None ) extends MQSerializer with JsonMessageSerializer { private val combinedJsonSerializers = { jsonSerializers.tail.foldLeft(jsonSerializers.head.serialize) { case (acc, serializer) => acc.orElse(serializer.serialize) } } override val serialize: PartialFunction[Any, JsObject] = { parent match { case Some(p) => combinedJsonSerializers.orElse(p.serialize) case None => combinedJsonSerializers } } override def serializeMessage(message: Any): Array[Byte] = { serialize(message).compactPrint.getBytes(Global.charset) } def orElse(next: JsonMQSerializer): JsonMQSerializer = new JsonMQSerializer(jsonSerializers, Some(next)) }
Example 140
Source File: ProtocolJsonSerializerSpec.scala From seahorse-workflow-executor with Apache License 2.0 | 5 votes |
package io.deepsense.workflowexecutor.communication.mq.serialization.json import java.nio.charset.Charset import org.scalatest.mockito.MockitoSugar import spray.json._ import io.deepsense.commons.StandardSpec import io.deepsense.commons.models.Entity import io.deepsense.deeplang.DOperable import io.deepsense.deeplang.doperables.ColumnsFilterer import io.deepsense.graph._ import io.deepsense.models.json.graph.GraphJsonProtocol.GraphReader import io.deepsense.models.json.workflow.{ExecutionReportJsonProtocol, InferredStateJsonProtocol, WorkflowWithResultsJsonProtocol} import io.deepsense.models.workflows._ import io.deepsense.reportlib.model.factory.ReportContentTestFactory import io.deepsense.workflowexecutor.communication.message.global._ import io.deepsense.workflowexecutor.communication.message.workflow.Synchronize class ProtocolJsonSerializerSpec extends StandardSpec with MockitoSugar with WorkflowWithResultsJsonProtocol with InferredStateJsonProtocol with HeartbeatJsonProtocol { override val graphReader: GraphReader = mock[GraphReader] "ProtocolJsonSerializer" should { val protocolJsonSerializer = ProtocolJsonSerializer(graphReader) "serialize Synchronize messages" in { protocolJsonSerializer.serializeMessage(Synchronize()) shouldBe expectedSerializationResult("synchronize", JsObject()) } } private def expectedSerializationResult(messageType: String, jsonObject: JsValue): Array[Byte] = { JsObject( "messageType" -> JsString(messageType), "messageBody" -> jsonObject ).compactPrint.getBytes(Charset.forName("UTF-8")) } }
Example 141
Source File: JsonSimpleConverter.scala From kafka-connect-common with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.converters.source import java.nio.charset.Charset import java.util import java.util.Collections import com.datamountaineer.streamreactor.connect.converters.MsgKey import org.apache.kafka.connect.data._ import org.apache.kafka.connect.source.SourceRecord class JsonSimpleConverter extends Converter { override def convert(kafkaTopic: String, sourceTopic: String, messageId: String, bytes: Array[Byte], keys:Seq[String] = Seq.empty, keyDelimiter:String = "."): SourceRecord = { require(bytes != null, s"Invalid $bytes parameter") val json = new String(bytes, Charset.defaultCharset) val schemaAndValue = JsonSimpleConverter.convert(sourceTopic, json) val value = schemaAndValue.value() value match { case s:Struct if keys.nonEmpty => val keysValue = keys.flatMap { key => Option(KeyExtractor.extract(s, key.split('.').toVector)).map(_.toString) }.mkString(keyDelimiter) new SourceRecord(Collections.singletonMap(Converter.TopicKey, sourceTopic), null, kafkaTopic, Schema.STRING_SCHEMA, keysValue, schemaAndValue.schema(), schemaAndValue.value()) case _=> new SourceRecord(Collections.singletonMap(Converter.TopicKey, sourceTopic), null, kafkaTopic, MsgKey.schema, MsgKey.getStruct(sourceTopic, messageId), schemaAndValue.schema(), schemaAndValue.value()) } } } object JsonSimpleConverter { import org.json4s._ import org.json4s.native.JsonMethods._ def convert(name: String, str: String): SchemaAndValue = convert(name, parse(str)) def convert(name: String, value: JValue): SchemaAndValue = { value match { case JArray(arr) => val values = new util.ArrayList[AnyRef]() val sv = convert(name, arr.head) values.add(sv.value()) arr.tail.foreach { v => values.add(convert(name, v).value()) } val schema = SchemaBuilder.array(sv.schema()).optional().build() new SchemaAndValue(schema, values) case JBool(b) => new SchemaAndValue(Schema.BOOLEAN_SCHEMA, b) case JDecimal(d) => val schema = Decimal.builder(d.scale).optional().build() new SchemaAndValue(schema, Decimal.fromLogical(schema, d.bigDecimal)) case JDouble(d) => new SchemaAndValue(Schema.FLOAT64_SCHEMA, d) case JInt(i) => new SchemaAndValue(Schema.INT64_SCHEMA, i.toLong) //on purpose! LONG (we might get later records with long entries) case JLong(l) => new SchemaAndValue(Schema.INT64_SCHEMA, l) case JNull | JNothing => new SchemaAndValue(Schema.STRING_SCHEMA, null) case JString(s) => new SchemaAndValue(Schema.STRING_SCHEMA, s) case JObject(values) => val builder = SchemaBuilder.struct().name(name.replace("/", "_")) val fields = values.map { case (n, v) => val schemaAndValue = convert(n, v) builder.field(n, schemaAndValue.schema()) n -> schemaAndValue.value() }.toMap val schema = builder.build() val struct = new Struct(schema) fields.foreach { case (field, v) => struct.put(field, v) } new SchemaAndValue(schema, struct) } } }
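A usage sketch for the converter object above (the topic name and JSON are illustrative); note that JSON integers are widened to INT64 on purpose, as the comment in the code says:

import org.apache.kafka.connect.data.Struct

val sv = JsonSimpleConverter.convert("/sensors/temp", """{"id": 7, "name": "probe-a", "ok": true}""")
val struct = sv.value().asInstanceOf[Struct]
struct.getInt64("id")    // 7L
struct.getString("name") // "probe-a"
struct.getBoolean("ok")  // true
// the schema name replaces '/' with '_': "_sensors_temp"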
Example 142
Source File: JsonOptNullConverter.scala From kafka-connect-common with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.converters.source import java.nio.charset.Charset import java.util import java.util.Collections import com.datamountaineer.streamreactor.connect.converters.MsgKey import org.apache.kafka.connect.data._ import org.apache.kafka.connect.source.SourceRecord class JsonOptNullConverter extends Converter { override def convert(kafkaTopic: String, sourceTopic: String, messageId: String, bytes: Array[Byte], keys:Seq[String] = Seq.empty, keyDelimiter:String = "."): SourceRecord = { require(bytes != null, s"Invalid $bytes parameter") val json = new String(bytes, Charset.defaultCharset) val schemaAndValue = JsonOptNullConverter.convert(sourceTopic, json) val value = schemaAndValue.value() value match { case s:Struct if keys.nonEmpty => val keysValue = keys.flatMap { key => Option(KeyExtractor.extract(s, key.split('.').toVector)).map(_.toString) }.mkString(keyDelimiter) new SourceRecord(Collections.singletonMap(Converter.TopicKey, sourceTopic), null, kafkaTopic, Schema.STRING_SCHEMA, keysValue, schemaAndValue.schema(), schemaAndValue.value()) case _=> new SourceRecord(Collections.singletonMap(Converter.TopicKey, sourceTopic), null, kafkaTopic, MsgKey.schema, MsgKey.getStruct(sourceTopic, messageId), schemaAndValue.schema(), schemaAndValue.value()) } } } object JsonOptNullConverter { import org.json4s._ import org.json4s.native.JsonMethods._ def convert(name: String, str: String): SchemaAndValue = convert(name, parse(str)) def convert(name: String, value: JValue): SchemaAndValue = { value match { case JArray(arr) => val values = new util.ArrayList[AnyRef]() val sv = convert(name, arr.head) values.add(sv.value()) arr.tail.foreach { v => values.add(convert(name, v).value()) } val schema = SchemaBuilder.array(sv.schema()).optional().build() new SchemaAndValue(schema, values) case JBool(b) => new SchemaAndValue(Schema.BOOLEAN_SCHEMA, b) case JDecimal(d) => val schema = Decimal.builder(d.scale).optional().build() new SchemaAndValue(schema, Decimal.fromLogical(schema, d.bigDecimal)) case JDouble(d) => new SchemaAndValue(Schema.FLOAT64_SCHEMA, d) case JInt(i) => new SchemaAndValue(Schema.INT64_SCHEMA, i.toLong) //on purpose! LONG (we might get later records with long entries) case JLong(l) => new SchemaAndValue(Schema.INT64_SCHEMA, l) case JNull | JNothing => new SchemaAndValue(Schema.OPTIONAL_STRING_SCHEMA, null) case JString(s) => new SchemaAndValue(Schema.STRING_SCHEMA, s) case JObject(values) => val builder = SchemaBuilder.struct().name(name.replace("/", "_")) val fields = values.map { case (n, v) => val schemaAndValue = convert(n, v) builder.field(n, schemaAndValue.schema()) n -> schemaAndValue.value() }.toMap val schema = builder.build() val struct = new Struct(schema) fields.foreach { case (field, v) => struct.put(field, v) } new SchemaAndValue(schema, struct) } } }
Example 143
Source File: BitbucketCloudSourceAcl.scala From kafka-security-manager with MIT License | 5 votes |
package com.github.simplesteph.ksm.source import java.io._ import java.nio.charset.Charset import java.util.Base64 import com.typesafe.config.Config import org.slf4j.LoggerFactory import skinny.http.{HTTP, HTTPException, Request, Response} class BitbucketCloudSourceAcl extends SourceAcl { private val log = LoggerFactory.getLogger(classOf[BitbucketCloudSourceAcl]) override val CONFIG_PREFIX: String = "bitbucket-cloud" final val API_URL_CONFIG = "api.url" final val ORGANIZATION_CONFIG = "organization" final val REPO_CONFIG = "repo" final val FILEPATH_CONFIG = "filepath" final val AUTH_USERNAME_CONFIG = "auth.username" final val AUTH_PASSWORD_CONFIG = "auth.password" var lastCommit: Option[String] = None var apiurl: String = _ var organization: String = _ var repo: String = _ var filePath: String = _ var username: String = _ var password: String = _ override def close(): Unit = { // HTTP } }
Example 144
Source File: CnProxyComPlugin.scala From ProxyCrawler with Apache License 2.0 | 5 votes |
package org.crowdcrawler.proxycrawler.crawler.plugins import org.crowdcrawler.proxycrawler.ProxyInfo import org.jsoup.Jsoup import java.net.URI import java.nio.charset.Charset import scala.collection.{immutable,mutable} import util.control.Breaks._ final class CnProxyComPlugin extends AbstractPlugin { private val charNum = immutable.Map( "v" -> "3", "m" -> "4", "a" -> "2", "l" -> "9", "q" -> "0", "b" -> "5", "i" -> "7", "w" -> "6", "r" -> "8", "c" -> "1" ) val seeds: List[URI] = { List( new URI("http://www.cnproxy.com/proxy1.html"), new URI("http://www.cnproxy.com/proxy2.html"), new URI("http://www.cnproxy.com/proxy3.html"), new URI("http://www.cnproxy.com/proxy4.html"), new URI("http://www.cnproxy.com/proxy5.html"), new URI("http://www.cnproxy.com/proxy6.html"), new URI("http://www.cnproxy.com/proxy7.html"), new URI("http://www.cnproxy.com/proxy8.html"), new URI("http://www.cnproxy.com/proxy9.html"), new URI("http://www.cnproxy.com/proxy10.html"), new URI("http://www.cnproxy.com/proxyedu1.html"), new URI("http://www.cnproxy.com/proxyedu2.html") ) } private def decryptPort(encrypted: String): Int = encrypted.split("\\+").map(str => charNum(str)).mkString.toInt def extract(html: String): List[ProxyInfo] = { val result = mutable.ListBuffer.empty[ProxyInfo] val doc = Jsoup.parse(html) val rows = doc.select("#proxylisttb > table").get(2).select("tr") for (i <- 1 until rows.size()) { breakable { // skip the first row val row = rows.get(i) val tds = row.select("td") val host = tds.get(0).text val port = { val pattern = "document.write(\":\"+" val original = tds.get(0).html() val pos1 = original.indexOf(pattern) if (pos1 == -1) break val pos2 = original.indexOf(")</script>", pos1) if (pos2 == -1) break val portStr = original.substring(pos1 + pattern.length, pos2) decryptPort(portStr) } val schema = tds.get(1).text val speeds = tds.get(2).text val speed = { val splitted = speeds.split(",") var sum = 0 for (str <- splitted) { val tmp = str.toInt sum += tmp } sum / splitted.length } val country = tds.get(3).text val proxyInfo = ProxyInfo(host, port, schema, speed, country, null) result += proxyInfo } } result.toList } def next(html: String): List[URI] = List() override val responseCharset: Charset = Charset.forName("GB2312") }
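The obfuscated port is a '+'-separated string of letters that `decryptPort` maps digit-by-digit through `charNum`; a standalone check with the same mapping (the encrypted sample is illustrative):

val charNum = Map(
  "v" -> "3", "m" -> "4", "a" -> "2", "l" -> "9", "q" -> "0",
  "b" -> "5", "i" -> "7", "w" -> "6", "r" -> "8", "c" -> "1")
val port = "r+q+r+q".split("\\+").map(charNum).mkString.toInt // 8080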
Example 145
Source File: IpcnOrgPlugin.scala From ProxyCrawler with Apache License 2.0 | 5 votes |
package org.crowdcrawler.proxycrawler.crawler.plugins import org.crowdcrawler.proxycrawler.ProxyInfo import org.jsoup.Jsoup import java.net.URI import java.nio.charset.Charset import scala.collection.mutable final class IpcnOrgPlugin extends AbstractPlugin { val seeds: List[URI] = List( new URI("http://proxy.ipcn.org/proxylist.html"), new URI("http://proxy.ipcn.org/proxylist2.html") ) def extract(html: String): List[ProxyInfo] = { val result = mutable.ListBuffer.empty[ProxyInfo] val doc = Jsoup.parse(html) val preText = doc.select("tr > td > pre").text val rows = preText.split("\n") for (row <- rows) { if (row.matches("[0-9]+(?:\\.[0-9]+){3}:[0-9]+")) { val splitted = row.split(":") val host = splitted(0) val port = splitted(1).toInt result += ProxyInfo(host, port, "HTTP", 0, null, null) } } result.toList } def next(html: String): List[URI] = List() override val responseCharset: Charset = Charset.forName("GB2312") }
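The extraction above keeps only the lines of the <pre> block that match a host:port pattern; the same check in isolation (the sample row is illustrative):

val row = "121.8.98.198:3128"
if (row.matches("[0-9]+(?:\\.[0-9]+){3}:[0-9]+")) {
  val Array(host, port) = row.split(":")
  ProxyInfo(host, port.toInt, "HTTP", 0, null, null) // speed and country are unknown at this stage
}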
Example 146
Source File: package.scala From swave with Mozilla Public License 2.0 | 5 votes |
package swave.core import java.nio.charset.Charset import com.typesafe.config.Config import scala.concurrent.duration._ import scala.concurrent.Future import scala.collection.mutable import shapeless.HList package object util { private[this] val _identityFunc = (x: Any) ⇒ x def identityFunc[T]: T ⇒ T = _identityFunc.asInstanceOf[T ⇒ T] def identityHash(obj: AnyRef): String = Integer.toHexString(System.identityHashCode(obj)) val dropFunc: Any ⇒ Unit = _ ⇒ () val dropFunc2: (Any, Any) ⇒ Unit = (_, _) ⇒ () val oneIntFunc: Any ⇒ Int = _ ⇒ 1 val UTF8: Charset = Charset.forName("UTF-8") val ASCII: Charset = Charset.forName("US-ASCII") def isPowerOf2(i: Int): Boolean = Integer.lowestOneBit(i) == i def roundUpToPowerOf2(i: Int): Int = 1 << (32 - Integer.numberOfLeadingZeros(i - 1)) def Runnable(body: ⇒ Unit): Runnable = new Runnable { def run(): Unit = body } implicit def richByteArray(array: Array[Byte]): RichByteArray = new RichByteArray(array) implicit def richConfig[T](config: Config): RichConfig = new RichConfig(config) implicit def richDuration(duration: Duration): RichDuration = new RichDuration(duration) implicit def richFiniteDuration(duration: FiniteDuration): RichFiniteDuration = new RichFiniteDuration(duration) implicit def richFuture[T](future: Future[T]): RichFuture[T] = new RichFuture(future) implicit def richHList[L <: HList](list: L): RichHList[L] = new RichHList(list) implicit def richInt(int: Int): RichInt = new RichInt(int) implicit def richList[T](list: List[T]): RichList[T] = new RichList(list) implicit def richLong(long: Long): RichLong = new RichLong(long) implicit def richArrayBuffer[T](seq: mutable.ArrayBuffer[T]): RichArrayBuffer[T] = new RichArrayBuffer(seq) implicit def richRefArray[T <: AnyRef](array: Array[T]): RichRefArray[T] = new RichRefArray(array) implicit def richSeq[T](seq: Seq[T]): RichSeq[T] = new RichSeq(seq) implicit def richString(string: String): RichString = new RichString(string) implicit def richTraversable[T](seq: Traversable[T]): RichTraversable[T] = new RichTraversable(seq) }
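The two bit-twiddling helpers are easy to sanity-check (assuming `swave.core.util._` is in scope): `isPowerOf2` holds when the lowest set bit is the only set bit, and `roundUpToPowerOf2` rounds up through the leading-zero count:

assert(isPowerOf2(64))              // Integer.lowestOneBit(64) == 64
assert(!isPowerOf2(48))             // lowest one bit is 16, not 48
assert(roundUpToPowerOf2(17) == 32) // 1 << (32 - numberOfLeadingZeros(16))
assert(roundUpToPowerOf2(32) == 32) // already a power of two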
Example 147
Source File: package.scala From swave with Mozilla Public License 2.0 | 5 votes |
package swave.core import java.nio.charset.{Charset, CodingErrorAction} import swave.core.io.Bytes package object text { implicit class RichBytesStreamOpsText[T, S[X] <: StreamOps[X]](val underlying: S[T]) extends AnyVal { def decode(charset: Charset, onMalformedInput: CodingErrorAction = CodingErrorAction.REPORT, onUnmappableCharacter: CodingErrorAction = CodingErrorAction.REPLACE)( implicit ev: Bytes[T]): S[T]#Repr[String] = underlying.via(Text.decode[T](charset, onMalformedInput, onUnmappableCharacter)) def utf8Decode(implicit ev: Bytes[T]): S[T]#Repr[String] = underlying.via(Text.utf8Decode) } implicit class RichStringStreamOpsText[S <: StreamOps[String]](val underlying: S) extends AnyVal { def encode[T: Bytes](charset: Charset): S#Repr[T] = underlying.via(Text.encode(charset)) def utf8Encode[T: Bytes]: S#Repr[T] = underlying.via(Text.utf8Encode) def lines: S#Repr[String] = underlying.via(Text.lines) } }
Example 148
Source File: ByteVectorBytes.scala From swave with Mozilla Public License 2.0 | 5 votes |
package swave.compat.scodec.impl import java.io.OutputStream import java.nio.ByteBuffer import java.nio.charset.{CharacterCodingException, Charset} import scala.collection.GenTraversableOnce import scodec.bits.ByteVector import swave.core.io.Bytes class ByteVectorBytes extends Bytes[ByteVector] { ///////////////// CONSTRUCTION /////////////////// def empty = ByteVector.empty def fill[A: Integral](size: Long)(byte: A) = ByteVector.fill(size)(byte) def apply(array: Array[Byte]) = ByteVector(array) def apply(bytes: Array[Byte], offset: Int, length: Int) = ByteVector(bytes, offset, length) def apply[A: Integral](bytes: A*) = ByteVector(bytes: _*) def apply(bytes: Vector[Byte]) = ByteVector(bytes) def apply(buffer: ByteBuffer) = ByteVector(buffer) def apply(bs: GenTraversableOnce[Byte]) = ByteVector(bs) def view(bytes: Array[Byte]) = ByteVector(bytes) def view(bytes: ByteBuffer) = ByteVector(bytes) def encodeString(str: String, charset: Charset) = if (str.isEmpty) empty else ByteVector(str getBytes charset) def encodeStringStrict(str: String, charset: Charset) = ByteVector.encodeString(str)(charset) ///////////////// QUERY /////////////////// def size(value: ByteVector) = value.size def byteAt(value: ByteVector, ix: Long) = value(ix) def indexOfSlice(value: ByteVector, slice: ByteVector, startIx: Long) = value.indexOfSlice(slice, startIx) ///////////////// TRANSFORMATION TO ByteVector /////////////////// def update(value: ByteVector, ix: Long, byte: Byte) = value.update(ix, byte) def concat(value: ByteVector, other: ByteVector) = value ++ other def concat(value: ByteVector, byte: Byte) = value :+ byte def concat(byte: Byte, value: ByteVector) = byte +: value def drop(value: ByteVector, n: Long) = value.drop(n) def take(value: ByteVector, n: Long) = value.take(n) def map(value: ByteVector, f: Byte ⇒ Byte) = value.map(f) def reverse(value: ByteVector) = value.reverse def compact(value: ByteVector) = value.compact ///////////////// TRANSFORMATION TO OTHER TYPES /////////////////// def toArray(value: ByteVector) = value.toArray def copyToArray(value: ByteVector, xs: Array[Byte], offset: Int) = value.copyToArray(xs, offset) def copyToArray(value: ByteVector, sourceOffset: Long, xs: Array[Byte], destOffset: Int, len: Int) = value.copyToArray(xs, destOffset, sourceOffset, len) def copyToBuffer(value: ByteVector, buffer: ByteBuffer): Int = value.copyToBuffer(buffer) def copyToOutputStream(value: ByteVector, s: OutputStream) = value.copyToStream(s) def toByteBuffer(value: ByteVector) = value.toByteBuffer def toIndexedSeq(value: ByteVector): IndexedSeq[Byte] = value.toIndexedSeq def toSeq(value: ByteVector): Seq[Byte] = value.toSeq def decodeString(value: ByteVector, charset: Charset): Either[CharacterCodingException, String] = value.decodeString(charset) ///////////////// ITERATION /////////////////// def foldLeft[A](value: ByteVector, z: A, f: (A, Byte) ⇒ A) = value.foldLeft(z)(f) def foldRight[A](value: ByteVector, z: A, f: (Byte, A) ⇒ A) = value.foldRight(z)(f) def foreach(value: ByteVector, f: Byte ⇒ Unit) = value.foreach(f) }
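A quick round-trip through the Bytes[ByteVector] instance above (the string and charset choice are illustrative):

import java.nio.charset.StandardCharsets
import scodec.bits.ByteVector

val bv = new ByteVectorBytes
val bytes: ByteVector = bv.encodeString("héllo", StandardCharsets.UTF_8)
bv.decodeString(bytes, StandardCharsets.UTF_8)         // Right("héllo")
bv.decodeString(bytes.take(2), StandardCharsets.UTF_8) // Left(...): cut mid multi-byte sequence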
Example 149
Source File: ByteStringBytes.scala From swave with Mozilla Public License 2.0 | 5 votes |
package swave.compat.akka.impl import java.io.OutputStream import java.nio.charset.{CharacterCodingException, Charset} import java.nio.{ByteBuffer, CharBuffer} import java.util import scala.annotation.tailrec import scala.collection.GenTraversableOnce import akka.util.ByteString import swave.core.io.Bytes import swave.core.macros._ class ByteStringBytes extends Bytes[ByteString] { ///////////////// CONSTRUCTION /////////////////// def empty = ByteString.empty def fill[A: Integral](size: Long)(byte: A) = { requireArg(0 <= size && size <= Int.MaxValue, "`size` must be >= 0 and <= Int.MaxValue") val b = implicitly[Integral[A]].toInt(byte).toByte apply(Array.fill(size.toInt)(b)) } def apply(array: Array[Byte]) = ByteString(array) def apply(bytes: Array[Byte], offset: Int, length: Int) = ByteString(util.Arrays.copyOfRange(bytes, offset, offset + length)) def apply[A: Integral](bytes: A*) = { val integral = implicitly[Integral[A]] val buf = new Array[Byte](bytes.size) @tailrec def rec(ix: Int): ByteString = if (ix < buf.length) { buf(ix) = integral.toInt(bytes(ix)).toByte rec(ix + 1) } else view(buf) rec(0) } def apply(bytes: Vector[Byte]) = ByteString(bytes: _*) def apply(buffer: ByteBuffer) = ByteString(buffer) def apply(bs: GenTraversableOnce[Byte]) = ByteString(bs.toArray) def view(bytes: Array[Byte]) = ByteString(bytes) // no view-like constructor available on ByteStrings def view(bytes: ByteBuffer) = ByteString(bytes) // no view-like constructor available on ByteStrings def encodeString(str: String, charset: Charset) = ByteString(str, charset.name) def encodeStringStrict(str: String, charset: Charset) = try Right(ByteString(charset.newEncoder.encode(CharBuffer.wrap(str)))) catch { case e: CharacterCodingException ⇒ Left(e) } ///////////////// QUERY /////////////////// def size(value: ByteString): Long = value.size.toLong def byteAt(value: ByteString, ix: Long): Byte = { requireArg(0 <= ix && ix <= Int.MaxValue, "`ix` must be >= 0 and <= Int.MaxValue") value(ix.toInt) } def indexOfSlice(value: ByteString, slice: ByteString, startIx: Long): Long = { requireArg(0 <= startIx && startIx <= Int.MaxValue, "`startIx` must be >= 0 and <= Int.MaxValue") value.indexOfSlice(slice, startIx.toInt).toLong } ///////////////// TRANSFORMATION TO ByteString /////////////////// def update(value: ByteString, ix: Long, byte: Byte) = concat(concat(take(value, ix), byte), drop(value, ix + 1)) def concat(value: ByteString, other: ByteString) = value ++ other def concat(value: ByteString, byte: Byte) = value ++ ByteString(byte) def concat(byte: Byte, value: ByteString) = ByteString(byte) ++ value def drop(value: ByteString, n: Long) = { requireArg(0 <= n && n <= Int.MaxValue, "`n` must be >= 0 and <= Int.MaxValue") value.drop(n.toInt) } def take(value: ByteString, n: Long) = { requireArg(0 <= n && n <= Int.MaxValue, "`n` must be >= 0 and <= Int.MaxValue") value.take(n.toInt) } def map(value: ByteString, f: Byte ⇒ Byte) = value.map(f) def reverse(value: ByteString) = value.reverse def compact(value: ByteString) = value.compact ///////////////// TRANSFORMATION TO OTHER TYPES /////////////////// def toArray(value: ByteString) = value.toArray def copyToArray(value: ByteString, xs: Array[Byte], offset: Int) = value.copyToArray(xs, offset) def copyToArray(value: ByteString, sourceOffset: Long, xs: Array[Byte], destOffset: Int, len: Int) = drop(value, sourceOffset).copyToArray(xs, destOffset, len) def copyToBuffer(value: ByteString, buffer: ByteBuffer): Int = value.copyToBuffer(buffer) def copyToOutputStream(value: ByteString, s: OutputStream) = { @tailrec def rec(ix: Int, size: Int): Unit = if (ix < size) { s.write(value(ix).toInt); rec(ix + 1, size) } rec(0, value.size) } def toByteBuffer(value: ByteString) = value.toByteBuffer def toIndexedSeq(value: ByteString): IndexedSeq[Byte] = value def toSeq(value: ByteString): Seq[Byte] = value def decodeString(value: ByteString, charset: Charset): Either[CharacterCodingException, String] = try Right(charset.newDecoder.decode(toByteBuffer(value)).toString) catch { case e: CharacterCodingException ⇒ Left(e) } ///////////////// ITERATION /////////////////// def foldLeft[A](value: ByteString, z: A, f: (A, Byte) ⇒ A) = value.foldLeft(z)(f) def foldRight[A](value: ByteString, z: A, f: (Byte, A) ⇒ A) = value.foldRight(z)(f) def foreach(value: ByteString, f: Byte ⇒ Unit) = value.foreach(f) }
Example 150
Source File: package.scala From squbs with Apache License 2.0 | 5 votes |
package org.squbs

import java.net.{URLDecoder, URLEncoder}
import java.nio.ByteBuffer
import java.nio.charset.Charset

import akka.actor.{Address, AddressFromURIString}
import akka.util.ByteString
import com.typesafe.scalalogging.Logger
import org.apache.curator.framework.CuratorFramework
import org.apache.zookeeper.CreateMode
import org.apache.zookeeper.KeeperException.NodeExistsException

import scala.language.implicitConversions
import scala.util.Try
import scala.util.control.NonFatal
import scala.collection.JavaConverters._

package object cluster {
  trait SegmentationLogic {
    val segmentsSize: Int
    def segmentation(partitionKey: ByteString): String = s"segment-${Math.abs(partitionKey.hashCode()) % segmentsSize}"
    def partitionZkPath(partitionKey: ByteString): String = s"/segments/${segmentation(partitionKey)}/${keyToPath(partitionKey)}"
    def sizeOfParZkPath(partitionKey: ByteString): String = s"${partitionZkPath(partitionKey)}/$$size"
    def servantsOfParZkPath(partitionKey: ByteString): String = s"${partitionZkPath(partitionKey)}/servants"
  }

  case class DefaultSegmentationLogic(segmentsSize: Int) extends SegmentationLogic

  def guarantee(path: String, data: Option[Array[Byte]], mode: CreateMode = CreateMode.EPHEMERAL)
               (implicit zkClient: CuratorFramework, logger: Logger): String = {
    try {
      data match {
        case None        => zkClient.create.withMode(mode).forPath(path)
        case Some(bytes) => zkClient.create.withMode(mode).forPath(path, bytes)
      }
    } catch {
      case e: NodeExistsException =>
        if (data.nonEmpty && data.get.length > 0) {
          zkClient.setData().forPath(path, data.get)
        }
        path
      case NonFatal(e) =>
        // SLF4J-style placeholder instead of an unsubstituted "%s" format string
        logger.info("leader znode creation failed due to {}", e)
        path
    }
  }

  def safelyDiscard(path: String, recursive: Boolean = true)(implicit zkClient: CuratorFramework): String = Try {
    if (recursive)
      zkClient.getChildren.forPath(path).asScala.foreach(child => safelyDiscard(s"$path/$child", recursive))
    zkClient.delete.forPath(path)
    path
  } getOrElse path

  def keyToPath(name: String): String = URLEncoder.encode(name, "utf-8")

  def pathToKey(name: String): String = URLDecoder.decode(name, "utf-8")

  private[cluster] val BYTES_OF_INT = Integer.SIZE / java.lang.Byte.SIZE

  implicit def intToBytes(integer: Int): Array[Byte] = {
    val buf = ByteBuffer.allocate(BYTES_OF_INT)
    buf.putInt(integer)
    buf.rewind
    buf.array()
  }

  val UTF_8 = Charset.forName("utf-8")

  implicit class ByteConversions(val bytes: Array[Byte]) extends AnyVal {
    def toAddress: Option[Address] =
      Option(bytes) flatMap (b => if (b.length <= 0) None else Some(AddressFromURIString(new String(b, UTF_8))))

    def toInt: Int = ByteBuffer.wrap(bytes).getInt

    def toUtf8: String = new String(bytes, UTF_8)

    def toByteString: ByteString = ByteString(bytes)

    def toAddressSet: Set[Address] =
      Try {
        new String(bytes, UTF_8).split("[,]").map(seg => AddressFromURIString(seg.trim)).toSet
      } getOrElse Set.empty
  }

  implicit def byteStringToUtf8(bs: ByteString): String = new String(bs.toArray, UTF_8)

  implicit def addressToBytes(address: Address): Array[Byte] = {
    address.toString.getBytes(UTF_8)
  }

  implicit def addressSetToBytes(members: Set[Address]): Array[Byte] = {
    members.mkString(",").getBytes(UTF_8)
  }
}
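A quick sketch of how the implicit codecs above compose; the values are illustrative:

// hedged sketch: ints and UTF-8 strings round-trip through the package-object implicits
import scala.language.implicitConversions
import org.squbs.cluster._

val encoded: Array[Byte] = 42                  // via intToBytes
assert(encoded.toInt == 42)                    // via ByteConversions.toInt
assert("key".getBytes(UTF_8).toUtf8 == "key")  // UTF-8 round trip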
Example 151
Source File: GitHubSourceAcl.scala From kafka-security-manager with MIT License | 5 votes |
package com.github.simplesteph.ksm.source import java.io.{Reader, StringReader} import java.nio.charset.Charset import java.util.Base64 import com.fasterxml.jackson.databind.ObjectMapper import com.typesafe.config.Config import org.slf4j.LoggerFactory import skinny.http.{HTTP, HTTPException, Request, Response} import scala.util.Try class GitHubSourceAcl extends SourceAcl { private val log = LoggerFactory.getLogger(classOf[GitHubSourceAcl]) override val CONFIG_PREFIX: String = "github" final val USER_CONFIG = "user" final val REPO_CONFIG = "repo" final val FILEPATH_CONFIG = "filepath" final val BRANCH_CONFIG = "branch" final val HOSTNAME_CONFIG = "hostname" final val AUTH_BASIC_CONFIG = "auth.basic" final val AUTH_TOKEN_CONFIG = "auth.token" var lastModified: Option[String] = None val objectMapper = new ObjectMapper() var user: String = _ var repo: String = _ var filepath: String = _ var branch: String = _ var hostname: String = _ var basicOpt: Option[String] = _ var tokenOpt: Option[String] = _ override def close(): Unit = { // HTTP } }
Example 152
Source File: Utilities.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.client import java.nio.charset.Charset import akka.util.{ByteString, Timeout} import org.apache.toree.communication.ZMQMessage import org.apache.toree.kernel.protocol.v5._ import org.apache.toree.kernel.protocol.v5.content.ExecuteRequest import org.apache.toree.utils.LogLike import play.api.data.validation.ValidationError import play.api.libs.json.{JsPath, Json, Reads} import scala.concurrent.duration._ object Utilities extends LogLike { // // NOTE: This is brought in to remove feature warnings regarding the use of // implicit conversions regarding the following: // // 1. ByteStringToString // 2. ZMQMessageToKernelMessage // import scala.language.implicitConversions private val sessionId: UUID = java.util.UUID.randomUUID().toString implicit val timeout = Timeout(21474835.seconds) // Maximum delay implicit def ByteStringToString(byteString : ByteString) : String = { new String(byteString.toArray, Charset.forName("UTF-8")) } implicit def StringToByteString(string : String) : ByteString = { ByteString(string.getBytes) } implicit def ZMQMessageToKernelMessage(message: ZMQMessage): KernelMessage = { val delimiterIndex: Int = message.frames.indexOf(ByteString("<IDS|MSG>".getBytes)) // TODO Handle the case where there is no delimiter val ids: Seq[Array[Byte]] = message.frames.take(delimiterIndex).map( (byteString : ByteString) => { byteString.toArray } ) val header = Json.parse(message.frames(delimiterIndex + 2)).as[Header] val parentHeader = Json.parse(message.frames(delimiterIndex + 3)).validate[ParentHeader].fold[ParentHeader]( // TODO: Investigate better solution than setting parentHeader to null for {} (invalid: Seq[(JsPath, Seq[ValidationError])]) => null, //HeaderBuilder.empty, (valid: ParentHeader) => valid ) val metadata = Json.parse(message.frames(delimiterIndex + 4)).as[Metadata] KMBuilder().withIds(ids.toList) .withSignature(message.frame(delimiterIndex + 1)) .withHeader(header) .withParentHeader(parentHeader) .withMetadata(metadata) .withContentString(message.frame(delimiterIndex + 5)).build(false) } implicit def KernelMessageToZMQMessage(kernelMessage : KernelMessage) : ZMQMessage = { val frames: scala.collection.mutable.ListBuffer[ByteString] = scala.collection.mutable.ListBuffer() kernelMessage.ids.map((id : Array[Byte]) => frames += ByteString.apply(id) ) frames += "<IDS|MSG>" frames += kernelMessage.signature frames += Json.toJson(kernelMessage.header).toString() frames += Json.toJson(kernelMessage.parentHeader).toString() frames += Json.toJson(kernelMessage.metadata).toString frames += kernelMessage.contentString ZMQMessage(frames : _*) } def parseAndHandle[T](json: String, reads: Reads[T], handler: T => Unit) : Unit = { Json.parse(json).validate[T](reads).fold( (invalid: Seq[(JsPath, Seq[ValidationError])]) => logger.error(s"Could not parse JSON, ${json}"), (content: T) => handler(content) ) } def getSessionId = sessionId def toKernelMessage(message: ExecuteRequest): KernelMessage = { // construct a kernel message whose content is an ExecuteRequest val id = java.util.UUID.randomUUID().toString val header = Header( id, "spark", sessionId, MessageType.Incoming.ExecuteRequest.toString, "5.0") KMBuilder().withIds(Seq[Array[Byte]]()).withSignature("").withHeader(header) .withParentHeader(HeaderBuilder.empty).withContentString(message).build } }
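The pair of implicits above is subtly asymmetric: ByteStringToString always decodes UTF-8, while StringToByteString encodes with the platform default charset. A minimal sketch of the assumption this bakes in:

// hedged sketch: the round trip only holds when the platform default charset is UTF-8
import org.apache.toree.kernel.protocol.v5.client.Utilities._
import akka.util.ByteString

val bs: ByteString = "héllo"   // StringToByteString uses String.getBytes (platform default)
val s: String = bs             // ByteStringToString always decodes as UTF-8
assert(s == "héllo")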
Example 153
Source File: BitbucketServerSourceAcl.scala From kafka-security-manager with MIT License | 5 votes |
package com.github.simplesteph.ksm.source import java.io.{Reader, StringReader} import java.nio.charset.Charset import java.util.Base64 import com.fasterxml.jackson.databind.ObjectMapper import com.typesafe.config.Config import org.slf4j.LoggerFactory import skinny.http.{HTTP, HTTPException, Request, Response} class BitbucketServerSourceAcl extends SourceAcl { private val log = LoggerFactory.getLogger(classOf[BitbucketServerSourceAcl]) override val CONFIG_PREFIX: String = "bitbucket-server" final val HOSTNAME_CONFIG = "hostname" final val PORT_CONFIG = "port" final val PROTOCOL_CONFIG = "protocol" final val PROJECT_CONFIG = "project" final val REPO_CONFIG = "repo" final val FILEPATH_CONFIG = "filepath" final val AUTH_USERNAME_CONFIG = "auth.username" final val AUTH_PASSWORD_CONFIG = "auth.password" final val BRANCH_CONFIG = "branch" var lastCommit: Option[String] = None val objectMapper = new ObjectMapper() var http: HTTP = HTTP var hostname: String = _ var port: String = _ var protocol: String = _ var project: String = _ var repo: String = _ var filePath: String = _ var username: String = _ var password: String = _ var branch: Option[String] = _ override def close(): Unit = { // HTTP } }
Example 154
Source File: GitLabSourceAcl.scala From kafka-security-manager with MIT License | 5 votes |
package com.github.simplesteph.ksm.source import java.io.{Reader, StringReader} import java.nio.charset.Charset import java.util.Base64 import com.fasterxml.jackson.databind.ObjectMapper import com.typesafe.config.Config import org.slf4j.LoggerFactory import skinny.http.{HTTP, HTTPException, Request, Response} class GitLabSourceAcl extends SourceAcl { private val log = LoggerFactory.getLogger(classOf[GitLabSourceAcl]) override val CONFIG_PREFIX: String = "gitlab" final val REPOID_CONFIG = "repoid" final val FILEPATH_CONFIG = "filepath" final val BRANCH_CONFIG = "branch" final val HOSTNAME_CONFIG = "hostname" final val ACCESSTOKEN_CONFIG = "accesstoken" var lastModified: Option[String] = None val objectMapper = new ObjectMapper() var repoid: String = _ var filepath: String = _ var branch: String = _ var hostname: String = _ var accessToken: String = _ override def close(): Unit = { // HTTP } }
Example 155
Source File: playjson.scala From pulsar4s with Apache License 2.0 | 5 votes |
package com.sksamuel.pulsar4s import java.nio.charset.Charset import org.apache.pulsar.client.api.Schema import org.apache.pulsar.common.schema.{SchemaInfo, SchemaType} import play.api.libs.json.{Json, Reads, Writes} import scala.annotation.implicitNotFound package object playjson { @implicitNotFound("No Writes or Reads for type ${T} found. Bring an implicit Writes[T] and Reads[T] instance in scope") implicit def playSchema[T: Manifest](implicit w: Writes[T], r: Reads[T]): Schema[T] = new Schema[T] { override def clone(): Schema[T] = this override def encode(t: T): Array[Byte] = Json.stringify(Json.toJson(t)(w)).getBytes(Charset.forName("UTF-8")) override def decode(bytes: Array[Byte]): T = Json.parse(bytes).as[T] override def getSchemaInfo: SchemaInfo = new SchemaInfo() .setName(manifest[T].runtimeClass.getCanonicalName) .setType(SchemaType.JSON) .setSchema("""{"type":"any"}""".getBytes("UTF-8")) } }
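A hypothetical end-to-end use of the schema above; Event and its Format are assumptions, not part of the original source:

// hedged usage sketch for playSchema
import org.apache.pulsar.client.api.Schema
import play.api.libs.json.{Json, OFormat}
import com.sksamuel.pulsar4s.playjson._

case class Event(id: Int, name: String)
implicit val eventFormat: OFormat[Event] = Json.format[Event]

val schema: Schema[Event] = playSchema[Event]
val bytes: Array[Byte] = schema.encode(Event(1, "a"))  // UTF-8 JSON bytes
assert(schema.decode(bytes) == Event(1, "a"))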
Example 156
Source File: package.scala From pulsar4s with Apache License 2.0 | 5 votes |
package com.sksamuel.pulsar4s import java.nio.charset.Charset import org.apache.pulsar.client.api.Schema import org.apache.pulsar.common.schema.{SchemaInfo, SchemaType} import scala.annotation.implicitNotFound package object sprayjson { import spray.json._ @implicitNotFound("No RootJsonWriter for type ${T} found. Bring an implicit RootJsonWriter[T] instance in scope") implicit def spraySchema[T: Manifest](implicit w: RootJsonWriter[T], r: RootJsonReader[T]): Schema[T] = new Schema[T] { override def clone(): Schema[T] = this override def encode(t: T): Array[Byte] = w.write(t).compactPrint.getBytes(Charset.forName("UTF-8")) override def decode(bytes: Array[Byte]): T = r.read(new String(bytes, "UTF-8").parseJson) override def getSchemaInfo: SchemaInfo = new SchemaInfo() .setName(manifest[T].runtimeClass.getCanonicalName) .setType(SchemaType.JSON) .setSchema("""{"type":"any"}""".getBytes("UTF-8")) } }
Example 157
Source File: TeeCommandTest.scala From shellbase with Apache License 2.0 | 5 votes |
package com.sumologic.shellbase.commands

import java.nio.charset.Charset
import java.nio.file.{Files, Path}

import com.sumologic.shellbase.CommonWordSpec
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner

import scala.collection.JavaConverters._
import scala.util.Random

@RunWith(classOf[JUnitRunner])
class TeeCommandTest extends CommonWordSpec {
  "TeeCommand" should {
    "execute a subcommand and propagate exit code" in {
      var calls = 0
      def callCheck(ret: Boolean)(input: String): Boolean = {
        input should be("hi")
        calls += 1
        ret
      }

      new TeeCommand(callCheck(true)).executeLine(List("`hi`", "-o", getTempFilePath().toString)) should be(true)
      calls should be(1)

      new TeeCommand(callCheck(false)).executeLine(List("`hi`", "-o", getTempFilePath().toString)) should be(false)
      calls should be(2)
    }

    "degrade nicely with malformatted input" in {
      new TeeCommand(_ => true).executeLine(List.empty) should be(false)
      new TeeCommand(_ => true).executeLine(List("test")) should be(false)
    }

    "write output to file, and support append mode" in {
      def printMessage(str: String): Boolean = {
        println(str)
        true
      }

      val tempFile = getTempFilePath()

      new TeeCommand(printMessage).executeLine(List("`hi mom`", "-o", tempFile.toString))
      // The first line is the debug line, so everything after is logged
      readTempFile(tempFile) should be(List("hi mom"))

      // We should overwrite since not in append mode
      new TeeCommand(printMessage).executeLine(List("`hi mom 2`", "-o", tempFile.toString))
      // The first line is the debug line, so everything after is logged
      readTempFile(tempFile) should be(List("hi mom 2"))

      // We have both 2 and 3 since in append mode
      new TeeCommand(printMessage).executeLine(List("`hi mom 3`", "-o", tempFile.toString, "-a"))
      // The first line is the debug line, so everything after is logged
      readTempFile(tempFile) should be(List("hi mom 2", "hi mom 3"))
    }
  }

  private def getTempFilePath(): Path = {
    Files.createTempFile("teecommand", ".tmp")
  }

  private def readTempFile(path: Path): List[String] = {
    Files.readAllLines(path, Charset.defaultCharset()).asScala.filterNot(_.startsWith("Running")).toList
  }
}
Example 158
Source File: TestResults.scala From mimir with Apache License 2.0 | 5 votes |
package mimir.util import java.nio.file.Files import java.nio.file.Paths import java.nio.charset.Charset import java.nio.charset.StandardCharsets import java.io.ByteArrayOutputStream import java.io.PrintWriter import org.rogach.scallop.ScallopConf object TestResults { def main(args: Array[String]) { val config = new TestResultConfig(args) println("running tests....") parseTestResults(config.sbtPath(),config.sbtCmd()) } def parseTestResults(sbtPath:String = "/opt/local/bin/sbt", sbtCmd:String = "test") = { val procOutput = runCommand(Seq(sbtPath,sbtCmd))._2.replaceAll("""\x1b\[[0-9;]*[a-zA-Z]""", "") val pattern = """(?m)^.*\[info\] Total.*$|^.*\[info\] Finished.*$|^.*\[info\] [\d]+ examp.*$""".r val header = "test_name,seconds,examples,expectations,failures,errors,skipped\n" val pattern2 = """\[info\] Total for specification (\w+)\s+\[info\] Finished in (.+)\R\[info\] (.+)\R""".r val pattern3 = """([a-zA-Z]+): (?:(\d+) minutes? )?(?:(\d+) seconds?[,:] )?(?:(\d+) ms[,:] )?(\d+) examples?, (?:(\d+) expectations?, )?(\d+) failures?, (\d+) errors?(?:, (\d+) skipped)?""".r val string = pattern2.findAllMatchIn(procOutput).map(mat => s"${mat.group(1)}: ${mat.group(2)}: ${mat.group(3)}") .map(nline => nline match { case pattern3(test_name,minutes,seconds,ms,examples,expectations,failures,errors,skipped) => { val allseconds = (minutes match { case "" => 0 case null => 0 case x => x.toInt*60 }) + (seconds match { case "" => 0 case null => 0 case x => x.toInt }) + (ms match { case "" => 0.0 case null => 0.0 case x => x.toDouble/1000.0 }) s"$test_name,$allseconds,$examples,$expectations,$failures,$errors,$skipped" } }).mkString("\n") val outStr = header + string println(outStr) Files.write(Paths.get("test_output.csv"), outStr.getBytes(StandardCharsets.UTF_8)) } import sys.process._ def runCommand(cmd: Seq[String]): (Int, String, String) = { val stdoutStream = new ByteArrayOutputStream val stderrStream = new ByteArrayOutputStream val stdoutWriter = new PrintWriter(stdoutStream) val stderrWriter = new PrintWriter(stderrStream) val exitValue = cmd.!(ProcessLogger(stdoutWriter.println, stderrWriter.println)) stdoutWriter.close() stderrWriter.close() (exitValue, stdoutStream.toString, stderrStream.toString) } } class TestResultConfig(arguments: Seq[String]) extends ScallopConf(arguments) { val experimental = opt[List[String]]("X", default = Some(List[String]())) val sparkHost = opt[String]("sparkHost", descr = "The IP or hostname of the spark master", default = Some("spark-master.local")) val sparkPort = opt[String]("sparkPort", descr = "The port of the spark master", default = Some("7077")) val sbtPath = opt[String]("sbtPath", descr = "The path to sbt binary", default = Some("/opt/local/bin/sbt")) val sbtCmd = opt[String]("sbtCmd", descr = "The sbt command to run", default = Some("test")) }
Example 159
Source File: CodecStreams.scala From Spark-2.3.1 with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.execution.datasources import java.io.{InputStream, OutputStream, OutputStreamWriter} import java.nio.charset.{Charset, StandardCharsets} import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.Path import org.apache.hadoop.io.compress._ import org.apache.hadoop.mapreduce.JobContext import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat import org.apache.hadoop.util.ReflectionUtils import org.apache.spark.TaskContext object CodecStreams { private def getDecompressionCodec(config: Configuration, file: Path): Option[CompressionCodec] = { val compressionCodecs = new CompressionCodecFactory(config) Option(compressionCodecs.getCodec(file)) } def createInputStream(config: Configuration, file: Path): InputStream = { val fs = file.getFileSystem(config) val inputStream: InputStream = fs.open(file) getDecompressionCodec(config, file) .map(codec => codec.createInputStream(inputStream)) .getOrElse(inputStream) } def getCompressionExtension(context: JobContext): String = { getCompressionCodec(context) .map(_.getDefaultExtension) .getOrElse("") } }
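A hypothetical read-side sketch; the path and configuration are assumptions. Decompression is picked from the file extension, and the caller applies the charset afterwards:

// hedged usage sketch: decompress via codec lookup, then decode with an explicit charset
import java.nio.charset.StandardCharsets
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path

val conf = new Configuration()
val in = CodecStreams.createInputStream(conf, new Path("/tmp/events.json.gz"))  // hypothetical path
val text = try scala.io.Source.fromInputStream(in, StandardCharsets.UTF_8.name).mkString finally in.close()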
Example 160
Source File: GraphQLRequestUnmarshaller.scala From graphql-gateway with Apache License 2.0 | 5 votes |
package sangria.gateway.http import java.nio.charset.Charset import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller} import akka.http.scaladsl.model._ import akka.http.scaladsl.model.headers.Accept import akka.http.scaladsl.server.Directive0 import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, Unmarshaller} import akka.util.ByteString import sangria.ast.Document import sangria.parser.QueryParser import sangria.renderer.{QueryRenderer, QueryRendererConfig} import scala.collection.immutable.Seq object GraphQLRequestUnmarshaller { val `application/graphql` = MediaType.applicationWithFixedCharset("graphql", HttpCharsets.`UTF-8`, "graphql") def explicitlyAccepts(mediaType: MediaType): Directive0 = headerValuePF { case Accept(ranges) if ranges.exists(range ⇒ !range.isWildcard && range.matches(mediaType)) ⇒ ranges }.flatMap(_ ⇒ pass) def includeIf(include: Boolean): Directive0 = if (include) pass else reject def unmarshallerContentTypes: Seq[ContentTypeRange] = mediaTypes.map(ContentTypeRange.apply) def mediaTypes: Seq[MediaType.WithFixedCharset] = List(`application/graphql`) implicit final def documentMarshaller(implicit config: QueryRendererConfig = QueryRenderer.Compact): ToEntityMarshaller[Document] = Marshaller.oneOf(mediaTypes: _*) { mediaType ⇒ Marshaller.withFixedContentType(ContentType(mediaType)) { json ⇒ HttpEntity(mediaType, QueryRenderer.render(json, config)) } } implicit final val documentUnmarshaller: FromEntityUnmarshaller[Document] = Unmarshaller.byteStringUnmarshaller .forContentTypes(unmarshallerContentTypes: _*) .map { case ByteString.empty ⇒ throw Unmarshaller.NoContentException case data ⇒ import sangria.parser.DeliveryScheme.Throw QueryParser.parse(data.decodeString(Charset.forName("UTF-8"))) } }
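In a route, the implicit unmarshaller above lets application/graphql bodies be parsed straight to a Document; a minimal sketch, with the route wiring assumed rather than taken from the original file:

// hedged route sketch using the implicit (un)marshallers above
import akka.http.scaladsl.server.Directives._
import sangria.ast.Document
import GraphQLRequestUnmarshaller._

val route =
  post {
    entity(as[Document]) { doc =>
      complete(doc) // rendered back through documentMarshaller
    }
  }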
Example 161
Source File: KernelOutputStream.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.stream import java.io.OutputStream import java.nio.charset.Charset import org.apache.toree.kernel.protocol.v5.content.StreamContent import org.apache.toree.kernel.protocol.v5.{SystemActorType, MessageType, KMBuilder} import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader import org.apache.toree.utils.{LogLike, ScheduledTaskManager} import scala.collection.mutable.ListBuffer import KernelOutputStream._ object KernelOutputStream { val DefaultStreamType = "stdout" val DefaultSendEmptyOutput = false } override def write(b: Int): Unit = internalBytes.synchronized { // Begin periodic flushing if this is a new set of bytes enableAutoFlush() internalBytes += b.toByte } }
Example 162
Source File: KernelInputStream.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.stream import java.io.InputStream import java.nio.charset.Charset import akka.pattern.ask import org.apache.toree.kernel.protocol.v5.content.InputRequest import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader import org.apache.toree.kernel.protocol.v5.kernel.Utilities.timeout import org.apache.toree.kernel.protocol.v5.{KMBuilder, MessageType} import scala.collection.mutable.ListBuffer import scala.concurrent.{Await, Future} import KernelInputStream._ object KernelInputStream { val DefaultPrompt = "" val DefaultPassword = false } override def read(): Int = { if (!this.hasByte) this.requestBytes() this.nextByte() } private def hasByte: Boolean = internalBytes.nonEmpty private def nextByte(): Int = { val byte = internalBytes.head internalBytes = internalBytes.tail byte } private def requestBytes(): Unit = { val inputRequest = InputRequest(prompt, password) // NOTE: Assuming already provided parent header and correct ids val kernelMessage = kmBuilder .withHeader(MessageType.Outgoing.InputRequest) .withContentString(inputRequest) .build // NOTE: The same handler is being used in both request and reply val responseFuture: Future[String] = (actorLoader.load(MessageType.Incoming.InputReply) ? kernelMessage) .mapTo[String] // Block until we get a response import scala.concurrent.duration._ internalBytes ++= Await.result(responseFuture, Duration.Inf).getBytes(EncodingType) } }
Example 163
Source File: ZeromqKernelMessageSocket.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket import java.nio.charset.Charset import akka.actor.{ActorSelection, ActorSystem, ActorRef, Actor} import akka.util.ByteString import org.apache.toree.communication.ZMQMessage //import org.apache.toree.kernel.protocol.v5.kernel.ZMQMessage import org.apache.toree.kernel.protocol.v5.KernelMessage import org.apache.toree.kernel.protocol.v5.kernel.Utilities._ import org.apache.toree.utils.MessageLogSupport abstract class ZeromqKernelMessageSocket( actorSocketFunc: (ActorSystem, ActorRef) => ActorRef, actorForwardFunc: () => ActorSelection ) extends Actor with MessageLogSupport { val actorSocketRef = actorSocketFunc(context.system, self) val actorForwardRef = actorForwardFunc() override def receive: Receive = { case message: ZMQMessage => val kernelMessage: KernelMessage = message logMessage(kernelMessage) // Grab the strings to use for signature verification val zmqStrings = message.frames.map((byteString: ByteString) => new String(byteString.toArray, Charset.forName("UTF-8")) ).takeRight(4) // TODO: This assumes NO extra buffers, refactor? // Forward along our message (along with the strings used for // signatures) actorForwardRef ! ((zmqStrings, kernelMessage)) case message: KernelMessage => val zmqMessage: ZMQMessage = message logMessage(message) actorSocketRef ! zmqMessage } }
Example 164
Source File: Utilities.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel import java.nio.charset.Charset import akka.util.{ByteString, Timeout} import org.apache.toree.communication.ZMQMessage import org.apache.toree.kernel.protocol.v5._ import org.apache.toree.utils.LogLike import play.api.data.validation.ValidationError import play.api.libs.json.{JsPath, Json, Reads} import scala.concurrent.duration._ object Utilities extends LogLike { // // NOTE: This is brought in to remove feature warnings regarding the use of // implicit conversions regarding the following: // // 1. ByteStringToString // 2. ZMQMessageToKernelMessage // import scala.language.implicitConversions implicit val timeout = Timeout(21474835.seconds) implicit def ByteStringToString(byteString : ByteString) : String = { new String(byteString.toArray, Charset.forName("UTF-8")) } implicit def StringToByteString(string : String) : ByteString = { ByteString(string.getBytes) } implicit def ZMQMessageToKernelMessage(message: ZMQMessage): KernelMessage = { val delimiterIndex: Int = message.frames.indexOf(ByteString("<IDS|MSG>".getBytes)) // TODO Handle the case where there is no delimiter val ids: Seq[Array[Byte]] = message.frames.take(delimiterIndex).map( (byteString : ByteString) => { byteString.toArray } ) val header = Json.parse(message.frames(delimiterIndex + 2)).as[Header] // TODO: Investigate better solution than setting parentHeader to null for {} val parentHeader = parseAndHandle(message.frames(delimiterIndex + 3), ParentHeader.headerReads, handler = (valid: ParentHeader) => valid, errHandler = _ => null ) val metadata = Json.parse(message.frames(delimiterIndex + 4)).as[Metadata] KMBuilder().withIds(ids.toList) .withSignature(message.frame(delimiterIndex + 1)) .withHeader(header) .withParentHeader(parentHeader) .withMetadata(metadata) .withContentString(message.frame(delimiterIndex + 5)).build(false) } implicit def KernelMessageToZMQMessage(kernelMessage : KernelMessage) : ZMQMessage = { val frames: scala.collection.mutable.ListBuffer[ByteString] = scala.collection.mutable.ListBuffer() kernelMessage.ids.map((id : Array[Byte]) => frames += ByteString.apply(id) ) frames += "<IDS|MSG>" frames += kernelMessage.signature frames += Json.toJson(kernelMessage.header).toString() frames += Json.toJson(kernelMessage.parentHeader).toString() frames += Json.toJson(kernelMessage.metadata).toString frames += kernelMessage.contentString ZMQMessage(frames : _*) } def parseAndHandle[T, U](json: String, reads: Reads[T], handler: T => U) : U = { parseAndHandle(json, reads, handler, (invalid: Seq[(JsPath, Seq[ValidationError])]) => { logger.error(s"Could not parse JSON, ${json}") throw new Throwable(s"Could not parse JSON, ${json}") } ) } def parseAndHandle[T, U](json: String, reads: Reads[T], handler: T => U, errHandler: Seq[(JsPath, Seq[ValidationError])] => U) : U = { Json.parse(json).validate[T](reads).fold( errHandler, (content: T) => handler(content) ) } }
Example 165
Source File: ShellSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket import java.nio.charset.Charset import akka.actor.{ActorSelection, ActorRef, ActorSystem, Props} import akka.testkit.{ImplicitSender, TestKit, TestProbe} import akka.util.ByteString import org.apache.toree.communication.ZMQMessage import org.apache.toree.kernel.protocol.v5._ import org.apache.toree.kernel.protocol.v5.kernel.{ActorLoader, Utilities} import org.apache.toree.kernel.protocol.v5Test._ import Utilities._ import com.typesafe.config.ConfigFactory import org.mockito.Matchers._ import org.mockito.Mockito._ import org.scalatest.mock.MockitoSugar import org.scalatest.{FunSpecLike, Matchers} import test.utils.MaxAkkaTestTimeout object ShellSpec { val config =""" akka { loglevel = "WARNING" }""" } class ShellSpec extends TestKit( ActorSystem( "ShellActorSpec", ConfigFactory.parseString(ShellSpec.config), org.apache.toree.Main.getClass.getClassLoader )) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar { describe("Shell") { val socketFactory = mock[SocketFactory] val actorLoader = mock[ActorLoader] val socketProbe : TestProbe = TestProbe() when(socketFactory.Shell(any(classOf[ActorSystem]), any(classOf[ActorRef]))).thenReturn(socketProbe.ref) val relayProbe : TestProbe = TestProbe() val relaySelection : ActorSelection = system.actorSelection(relayProbe.ref.path) when(actorLoader.load(SystemActorType.KernelMessageRelay)).thenReturn(relaySelection) val shell = system.actorOf(Props(classOf[Shell], socketFactory, actorLoader)) describe("#receive") { it("( KernelMessage ) should reply with a ZMQMessage via the socket") { // Use the implicit to convert the KernelMessage to ZMQMessage val MockZMQMessage : ZMQMessage = MockKernelMessage shell ! MockKernelMessage socketProbe.expectMsg(MockZMQMessage) } it("( ZMQMessage ) should forward ZMQ Strings and KernelMessage to Relay") { // Use the implicit to convert the KernelMessage to ZMQMessage val MockZMQMessage : ZMQMessage = MockKernelMessage shell ! MockZMQMessage // Should get the last four (assuming no buffer) strings in UTF-8 val zmqStrings = MockZMQMessage.frames.map((byteString: ByteString) => new String(byteString.toArray, Charset.forName("UTF-8")) ).takeRight(4) val kernelMessage: KernelMessage = MockZMQMessage relayProbe.expectMsg(MaxAkkaTestTimeout, (zmqStrings, kernelMessage)) } } } }
Example 166
Source File: StdinSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket import java.nio.charset.Charset import akka.actor.{Props, ActorSelection, ActorRef, ActorSystem} import akka.testkit.{TestProbe, ImplicitSender, TestKit} import akka.util.ByteString import org.apache.toree.communication.ZMQMessage import org.apache.toree.kernel.protocol.v5.kernel.Utilities._ import org.apache.toree.kernel.protocol.v5Test._ import org.apache.toree.kernel.protocol.v5.{KernelMessage, SystemActorType} import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader import com.typesafe.config.ConfigFactory import org.scalatest.mock.MockitoSugar import org.scalatest.{Matchers, FunSpecLike} import org.mockito.Mockito._ import org.mockito.Matchers._ import test.utils.MaxAkkaTestTimeout object StdinSpec { val config =""" akka { loglevel = "WARNING" }""" } class StdinSpec extends TestKit(ActorSystem( "StdinActorSpec", ConfigFactory.parseString(StdinSpec.config), org.apache.toree.Main.getClass.getClassLoader )) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar { describe("Stdin") { val socketFactory = mock[SocketFactory] val actorLoader = mock[ActorLoader] val socketProbe : TestProbe = TestProbe() when(socketFactory.Stdin(any(classOf[ActorSystem]), any(classOf[ActorRef]))).thenReturn(socketProbe.ref) val relayProbe : TestProbe = TestProbe() val relaySelection : ActorSelection = system.actorSelection(relayProbe.ref.path) when(actorLoader.load(SystemActorType.KernelMessageRelay)).thenReturn(relaySelection) val stdin = system.actorOf(Props(classOf[Stdin], socketFactory, actorLoader)) describe("#receive") { it("( KernelMessage ) should reply with a ZMQMessage via the socket") { // Use the implicit to convert the KernelMessage to ZMQMessage val MockZMQMessage : ZMQMessage = MockKernelMessage stdin ! MockKernelMessage socketProbe.expectMsg(MockZMQMessage) } it("( ZMQMessage ) should forward ZMQ Strings and KernelMessage to Relay") { // Use the implicit to convert the KernelMessage to ZMQMessage val MockZMQMessage : ZMQMessage = MockKernelMessage stdin ! MockZMQMessage // Should get the last four (assuming no buffer) strings in UTF-8 val zmqStrings = MockZMQMessage.frames.map((byteString: ByteString) => new String(byteString.toArray, Charset.forName("UTF-8")) ).takeRight(4) val kernelMessage: KernelMessage = MockZMQMessage relayProbe.expectMsg(MaxAkkaTestTimeout, (zmqStrings, kernelMessage)) } } } }
Example 167
Source File: JdbcFlow.scala From fusion-data with Apache License 2.0 | 5 votes |
package mass.connector.sql import java.nio.charset.{ Charset, StandardCharsets } import java.sql.ResultSet import akka.NotUsed import akka.stream.scaladsl.Flow import akka.util.ByteString import fusion.jdbc.util.JdbcUtils import scala.collection.immutable case class JdbcResultSet(rs: ResultSet, values: immutable.IndexedSeq[AnyRef]) object JdbcFlow { def flowToText(valueSeparator: Char = ','): Flow[immutable.IndexedSeq[AnyRef], String, NotUsed] = Flow[immutable.IndexedSeq[AnyRef]].map { values => val builder = new java.lang.StringBuilder() var i = 0 while (i < values.length) { builder.append(values(i).toString) i += 1 if (i < values.length) { builder.append(valueSeparator) } } builder.toString } def flowToSeq: Flow[ResultSet, immutable.IndexedSeq[AnyRef], NotUsed] = Flow[ResultSet].map { rs => val metaData = rs.getMetaData (1 to rs.getMetaData.getColumnCount).map { i => val typ = metaData.getColumnType(i) if (JdbcUtils.isString(typ)) { rs.getString(i) } else rs.getObject(i) } } def flowToByteString( valueSeparator: Char = ',', charset: Charset = StandardCharsets.UTF_8): Flow[immutable.IndexedSeq[AnyRef], ByteString, NotUsed] = Flow[immutable.IndexedSeq[AnyRef]].map { values => val builder = ByteString.newBuilder var i = 0 while (i < values.length) { builder.putBytes(values(i).toString.getBytes(charset)) i += 1 if (i < values.length) { builder.putByte(valueSeparator.toByte) } } builder.result() } def flowJdbcResultSet: Flow[ResultSet, JdbcResultSet, NotUsed] = Flow[ResultSet].map { rs => val metaData = rs.getMetaData JdbcResultSet(rs, (1 to metaData.getColumnCount).map(i => rs.getObject(i))) } }
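A sketch of composing these stages; where the ResultSet rows come from is an assumption:

// hedged sketch: ResultSet rows to CSV lines as UTF-8 ByteStrings
import java.nio.charset.StandardCharsets
import java.sql.ResultSet
import akka.NotUsed
import akka.stream.scaladsl.Source
import akka.util.ByteString

val rows: Source[ResultSet, NotUsed] = ???  // assumed to come from a JDBC source stage
val csvBytes: Source[ByteString, NotUsed] =
  rows
    .via(JdbcFlow.flowToSeq)
    .via(JdbcFlow.flowToByteString(',', StandardCharsets.UTF_8))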
Example 168
Source File: JobMessage.scala From fusion-data with Apache License 2.0 | 5 votes |
package mass.message.job

import java.io.File
import java.nio.charset.Charset
import java.nio.file.Path
import java.time.OffsetDateTime
import java.util.concurrent.TimeUnit

import akka.http.scaladsl.server.directives.FileInfo
import fusion.json.CborSerializable
import helloscala.common.data.{ IntValueName, StringValueName, ValueName }
import mass.common.page.{ Page, PageResult }
import mass.core.job.JobResult
import mass.model.CommonStatus
import mass.model.job._

import scala.concurrent.duration.FiniteDuration

sealed trait JobMessage extends CborSerializable
sealed trait JobResponse extends CborSerializable

final case class JobErrorResponse(status: Int, message: String) extends JobResponse

final case class ProgramVersionItem(programId: String, versions: Seq[StringValueName])

final case class JobGetAllOptionReq() extends JobMessage
final case class JobGetAllOptionResp(
    program: Seq[StringValueName],
    triggerType: Seq[ValueName[String]],
    programVersion: Seq[ProgramVersionItem],
    jobStatus: Seq[IntValueName])
    extends JobResponse

final case class JobScheduleReq(key: String) extends JobMessage

final case class JobCreateReq(key: Option[String], item: JobItem, trigger: JobTrigger) extends JobMessage
final case class JobCreateResp(schedule: Option[JobSchedule]) extends JobResponse

final case class JobUpdateReq(
    key: String,
    program: Option[Program] = None,
    programOptions: Option[Seq[String]] = None,
    programMain: Option[String] = None,
    programArgs: Option[Seq[String]] = None,
    programVersion: Option[String] = None,
    resources: Option[Map[String, String]] = None,
    data: Option[Map[String, String]] = None,
    description: Option[String] = None,
    dependentJobKeys: Option[Seq[String]] = None,
    name: Option[String] = None,
    triggerType: Option[TriggerType] = None,
    triggerEvent: Option[String] = None,
    startTime: Option[OffsetDateTime] = None,
    endTime: Option[OffsetDateTime] = None,
    // number of repetitions
    repeat: Option[Int] = None,
    // interval between repetitions
    interval: Option[FiniteDuration] = None,
    cronExpress: Option[String] = None,
    failedRetries: Option[Int] = None,
    timeout: Option[FiniteDuration] = None,
    alarmEmails: Option[Seq[String]] = None,
    status: Option[CommonStatus] = None)
    extends JobMessage

final case class JobFindReq(key: String) extends JobMessage
final case class JobSchedulerResp(schedule: Option[JobSchedule]) extends JobResponse

final case class JobPageReq(page: Int = 1, size: Int = 20, key: Option[String] = None) extends Page with JobMessage
final case class JobPageResp(content: Seq[JobSchedule], totalElements: Long, page: Int, size: Int)
    extends PageResult[JobSchedule]
    with JobResponse

final case class JobListReq(key: String) extends JobMessage
final case class JobListResp(items: Seq[JobSchedule]) extends JobResponse

final case class SchedulerJobResult(
    start: OffsetDateTime,
    end: OffsetDateTime,
    exitValue: Int,
    outPath: String,
    errPath: String)
    extends JobResult {
  def runDuration: FiniteDuration =
    FiniteDuration(java.time.Duration.between(start, end).toNanos, TimeUnit.NANOSECONDS).toCoarsest
}

final case class JobUploadJobReq(file: Path, fileName: String, charset: Charset) extends JobMessage
final case class JobUploadJobResp(resps: Seq[JobCreateResp]) extends JobResponse

final case class JobUploadFilesReq(items: Seq[(FileInfo, File)]) extends JobMessage
final case class JobUploadFilesResp(resources: Seq[IntValueName]) extends JobResponse
Example 169
Source File: JobUtils.scala From fusion-data with Apache License 2.0 | 5 votes |
package mass.job.util

import java.io.File
import java.nio.charset.Charset
import java.nio.file.{ Files, Path, StandardCopyOption }
import java.util.zip.ZipFile

import com.typesafe.scalalogging.StrictLogging
import helloscala.common.Configuration
import helloscala.common.util.{ DigestUtils, Utils }
import mass.common.util.FileUtils
import mass.core.job.JobConstants
import mass.job.JobSettings
import mass.message.job._
import mass.model.job.{ JobItem, JobTrigger }

import scala.concurrent.{ ExecutionContext, Future }

object JobUtils extends StrictLogging {
  case class JobZipInternal private (configs: Vector[JobCreateReq], entries: Vector[Path])

  def uploadJob(jobSettings: JobSettings, req: JobUploadJobReq)(implicit ec: ExecutionContext): Future[JobZip] =
    Future {
      val sha256 = DigestUtils.sha256HexFromPath(req.file)
      val dest = jobSettings.jobSavedDir.resolve(sha256.take(2)).resolve(sha256)
      val jobZipInternal = parseJobZip(req.file, req.charset, dest.resolve(JobConstants.DIST)) match {
        case Right(v) => v
        case Left(e)  => throw e
      }
      val zipPath = dest.resolve(req.fileName)
      Files.move(req.file, zipPath, StandardCopyOption.REPLACE_EXISTING)
      JobZip(zipPath, jobZipInternal.configs, jobZipInternal.entries)
    }

  @inline def parseJobZip(file: Path, charset: Charset, dest: Path): Either[Throwable, JobZipInternal] =
    parseJobZip(file.toFile, charset, dest)

  def parseJobZip(file: File, charset: Charset, dest: Path): Either[Throwable, JobZipInternal] = Utils.either {
    import scala.jdk.CollectionConverters._
    import scala.language.existentials

    val zip = new ZipFile(file, charset)
    try {
      val (confEntries, fileEntries) = zip
        .entries()
        .asScala
        .filterNot(entry => entry.isDirectory)
        .span(entry => entry.getName.endsWith(JobConstants.ENDS_SUFFIX) && !entry.isDirectory)
      val configs = confEntries.map(confEntry =>
        parseJobConf(FileUtils.getString(zip.getInputStream(confEntry), charset, "\n")) match {
          case Right(config) => config
          case Left(e)       => throw e
        })
      val buf = Array.ofDim[Byte](1024)
      val entryPaths = fileEntries.map { entry =>
        val entryName = entry.getName
        val savePath = dest.resolve(entryName)
        if (!Files.isDirectory(savePath.getParent)) {
          Files.createDirectories(savePath.getParent)
        }
        FileUtils.write(zip.getInputStream(entry), Files.newOutputStream(savePath), buf) // write the zip entry to disk
        savePath
      }
      JobZipInternal(configs.toVector, entryPaths.toVector)
    } finally {
      if (zip ne null) zip.close()
    }
  }

  def parseJobConf(content: String): Either[Throwable, JobCreateReq] = Utils.either {
    val conf = Configuration.parseString(content)
    val jobItem = JobItem(conf.getConfiguration("item"))
    val jobTrigger = JobTrigger(conf.getConfiguration("trigger"))
    JobCreateReq(conf.get[Option[String]]("key"), jobItem, jobTrigger)
  }
}

case class JobZip(zipPath: Path, configs: Vector[JobCreateReq], entries: Vector[Path])
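One detail worth calling out: the Charset handed to ZipFile governs how entry names are decoded, which matters for archives built on non-UTF-8 systems. A hypothetical call, with the charset and paths as assumptions:

// hedged sketch: parse a job zip whose entry names are GBK-encoded
import java.io.File
import java.nio.charset.Charset
import java.nio.file.Paths

val parsed = JobUtils.parseJobZip(new File("/tmp/job.zip"), Charset.forName("GBK"), Paths.get("/tmp/dist"))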
Example 170
Source File: TextDisplay.scala From almond with BSD 3-Clause "New" or "Revised" License | 5 votes |
package almond.display import java.io.{ByteArrayOutputStream, InputStream} import java.net.{HttpURLConnection, URL, URLConnection} import java.nio.charset.{Charset, StandardCharsets} import scala.util.Try abstract class TextDisplay extends UpdatableDisplay { def contentOrUrl: Either[URL, String] def content: Option[String] = contentOrUrl.right.toOption def url: Option[URL] = contentOrUrl.left.toOption def finalContent: String = contentOrUrl match { case Left(url) => TextDisplay.urlContent(url) case Right(c) => c } def withContent(code: String): UpdatableDisplay def withUrl(url: String): UpdatableDisplay } object TextDisplay { type Builder[T] = Display.Builder[String, T] private[almond] def readFully(is: InputStream): Array[Byte] = { val buffer = new ByteArrayOutputStream val data = Array.ofDim[Byte](16384) var nRead = 0 while ( { nRead = is.read(data, 0, data.length) nRead != -1 }) buffer.write(data, 0, nRead) buffer.flush() buffer.toByteArray } def urlContent(url: URL): String = { var conn: URLConnection = null val (rawContent, charsetOpt) = try { conn = url.openConnection() conn.setConnectTimeout(5000) // allow users to tweak that? val b = readFully(conn.getInputStream) val charsetOpt0 = conn match { case conn0: HttpURLConnection => conn0 .getContentType .split(';') .map(_.trim) .find(_.startsWith("charset=")) .map(_.stripPrefix("charset=")) .filter(Charset.isSupported) .map(Charset.forName) case _ => None } (b, charsetOpt0) } finally { if (conn != null) { Try(conn.getInputStream.close()) conn match { case conn0: HttpURLConnection => Try(conn0.getErrorStream.close()) Try(conn0.disconnect()) case _ => } } } new String(rawContent, charsetOpt.getOrElse(StandardCharsets.UTF_8)) } }
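urlContent honours the charset declared in the HTTP Content-Type header and falls back to UTF-8; a one-line sketch, with the URL as an assumption:

// hedged sketch: fetch text, decoding with the server-declared charset when present
val text = TextDisplay.urlContent(new java.net.URL("https://example.com/notes.txt"))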
Example 171
Source File: FunctionOutputStream.scala From almond with BSD 3-Clause "New" or "Revised" License | 5 votes |
package almond.internals import java.io.{OutputStream, PrintStream} import java.nio.{ByteBuffer, CharBuffer} import java.nio.charset.{Charset, CoderResult} class FunctionOutputStream( inputBufferSize: Int, outputBufferSize: Int, internalCharset: Charset, f: String => Unit ) extends OutputStream { // not thread-safe private val decoder = internalCharset.newDecoder() private val inArray = Array.ofDim[Byte](inputBufferSize) private val outArray = Array.ofDim[Char](outputBufferSize) private val writeBuf = ByteBuffer.wrap(inArray) private val out = CharBuffer.wrap(outArray) private def flushIfNeeded(): Unit = if (!writeBuf.hasRemaining) flush() def write(b: Int): Unit = { writeBuf.put(b.toByte) // hope toByte doesn't box b flushIfNeeded() } override def write(b: Array[Byte], off: Int, len: Int) = { var off0 = off var len0 = len while (len0 > 0) { val take = math.min(len0, writeBuf.remaining()) assert(take > 0) writeBuf.put(b, off0, take) off0 = off0 + take len0 = len0 - take flushIfNeeded() } assert(len0 == 0) assert(off0 == off + len) } override def flush(): Unit = { super.flush() val readBuf = ByteBuffer.wrap(inArray, 0, writeBuf.position()) var r: CoderResult = null while (r == null || r.isOverflow) { if (r != null) { readBuf.position(0) readBuf.limit(writeBuf.position()) } r = decoder.decode(readBuf, out, false) val outLen = out.position() if (r.isError || (r.isOverflow && outLen == 0)) r.throwException() else { if (outLen > 0) { val s = new String(outArray, 0, outLen) out.clear() f(s) } val read = readBuf.position() val avail = writeBuf.position() val remaining = avail - read writeBuf.position(remaining) if (remaining > 0) System.arraycopy(inArray, read, inArray, 0, remaining) } } } def printStream(): PrintStream = new PrintStream(this, true, internalCharset.name()) }
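A sketch of the decoder in action; buffer sizes and the callback are illustrative:

// hedged sketch: bytes written to the PrintStream arrive as decoded string chunks
import java.nio.charset.StandardCharsets

val chunks = scala.collection.mutable.Buffer.empty[String]
val ps = new FunctionOutputStream(64, 64, StandardCharsets.UTF_8, s => chunks += s).printStream()
ps.println("héllo")  // multi-byte UTF-8 is decoded correctly across buffer boundaries
assert(chunks.mkString.startsWith("héllo"))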
Example 172
Source File: CaptureImpl.scala From almond with BSD 3-Clause "New" or "Revised" License | 5 votes |
package almond.internals import java.io.PrintStream import java.nio.charset.{Charset, StandardCharsets} final class CaptureImpl( inputBufferSize: Int = 1024, outputBufferSize: Int = 1024, internalCharset: Charset = StandardCharsets.UTF_8 ) extends Capture { // not thread-safe private var out0: String => Unit = _ private var err0: String => Unit = _ val out: PrintStream = new FunctionOutputStream( inputBufferSize, outputBufferSize, internalCharset, s => if (out0 != null) out0(s) ).printStream() val err: PrintStream = new FunctionOutputStream( inputBufferSize, outputBufferSize, internalCharset, s => if (err0 != null) err0(s) ).printStream() def apply[T]( stdout: String => Unit, stderr: String => Unit )( block: => T ): T = try { out0 = stdout err0 = stderr Console.withOut(out) { Console.withErr(err) { val oldOut = System.out val oldErr = System.err try { System.setOut(out) System.setErr(err) block } finally { System.setOut(oldOut) System.setErr(oldErr) } } } } finally { out0 = null err0 = null } }
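A usage sketch; the sinks and the captured block are illustrative:

// hedged sketch: capture Console/System output as decoded strings
val capture = new CaptureImpl()
val sb = new StringBuilder
capture(s => sb ++= s, _ => ()) {
  println("captured")
}
assert(sb.toString.contains("captured"))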
Example 173
Source File: FunctionInputStream.scala From almond with BSD 3-Clause "New" or "Revised" License | 5 votes |
package almond.internals

import java.io.InputStream
import java.nio.ByteBuffer
import java.nio.charset.Charset

import scala.annotation.tailrec

class FunctionInputStream(internalCharset: Charset, f: => Option[String]) extends InputStream {

  // not thread-safe

  private var bufferOpt = Option.empty[ByteBuffer]
  private var done = false

  @tailrec
  private def maybeFetchNewBuffer(): Option[ByteBuffer] =
    if (done)
      None
    else {
      if (bufferOpt.forall(!_.hasRemaining)) {
        val s = f
        s match {
          case Some(value) =>
            // encode with the configured charset so reads match the declared encoding
            val b0 = ByteBuffer.wrap(value.getBytes(internalCharset)).asReadOnlyBuffer()
            bufferOpt = Some(b0)
          case None =>
            done = true
            bufferOpt = None
        }
        maybeFetchNewBuffer() // maybe we were given an empty string, and need to call f again
      } else
        bufferOpt
    }

  def read(): Int =
    maybeFetchNewBuffer()
      .fold(-1)(_.get() & 0xff) // mask to the 0-255 range required by InputStream.read

  override def read(b: Array[Byte], off: Int, len: Int): Int =
    // InputStream.read does these 3 checks upfront too
    if (b == null)
      throw new NullPointerException
    else if (off < 0 || len < 0 || len > b.length - off)
      throw new IndexOutOfBoundsException
    else if (len == 0)
      0
    else
      maybeFetchNewBuffer().fold(-1) { b0 =>
        val toRead = math.min(b0.remaining(), len)
        b0.get(b, off, toRead)
        toRead
      }

  def clear(): Unit = {
    done = false
    bufferOpt = None
  }
}
Example 174
Source File: ScalazMain.scala From advanced-scala-code with Apache License 2.0 | 5 votes |
import java.nio.charset.Charset import java.util.concurrent.Executors import org.asynchttpclient.DefaultAsyncHttpClient import scala.concurrent.Future import scalaz.{-\/, \/, \/-} import scalaz.concurrent.Task object ScalazMain { def main(args: Array[String]): Unit = { def performAction(num: Int): Unit = println(s"Task #$num is executing in ${Thread.currentThread().getName}") import scala.concurrent.ExecutionContext.Implicits.global val result1F = Future { performAction(0) } val result2F = Future.successful { performAction(1) } // Executes immediately in the main thread val result2T = Task.now { performAction(2) } // Schedules an execution in a default worker thread // = Executors.newFixedThreadPool(Math.max(4, Runtime.getRuntime.availableProcessors), DefaultDaemonThreadFactory) val result3T = Task { performAction(3) } // Lifts a code block to a Task without scheduling an execution val result4T = Task.delay { performAction(4) } result3T.unsafePerformAsync(_ => ()) implicit val executorService = Executors.newSingleThreadExecutor() val result5T = Task { performAction(5) } result3T.unsafePerformSync val asyncHttpClient = new DefaultAsyncHttpClient() arm.ArmUtils.using(asyncHttpClient) { val result6T = Task.async[String](handler => { asyncHttpClient.prepareGet("https://httpbin.org/get").execute(). toCompletableFuture.whenComplete { (response, exc) => { if (exc == null) { handler(\/.right(response.getResponseBody(Charset.forName("UTF-8")))) } else handler(-\/(exc)) }} }) val responseString = result6T.unsafePerformSync println(responseString) } } }
Example 175
Source File: MonixTaskMain.scala From advanced-scala-code with Apache License 2.0 | 5 votes |
object MonixTaskMain { def main(args: Array[String]): Unit = { import org.asynchttpclient.DefaultAsyncHttpClient val asyncHttpClient = new DefaultAsyncHttpClient() arm.ArmUtils.using(asyncHttpClient) { import java.nio.charset.Charset import monix.eval.Task val result6T = Task.create[String]( (_, callback) => { val lf = asyncHttpClient.prepareGet("https://httpbin.org/get").execute() val javaFuture = lf.toCompletableFuture javaFuture.whenComplete { (response, exc) => { if (exc == null) { callback.onSuccess(response.getResponseBody(Charset.forName("UTF-8"))) } else callback.onError(exc) }} import monix.execution.Cancelable Cancelable.apply { () => javaFuture.cancel(true) } }) import monix.execution.Scheduler.Implicits.global val resultCF = result6T.runToFuture import scala.concurrent.Await import scala.concurrent.duration._ val result = Await.result(resultCF, 5.seconds) println(result) } } }
Example 176
Source File: FileSystem.scala From ohara with Apache License 2.0 | 4 votes |
package oharastream.ohara.client.filesystem import java.io.{BufferedReader, BufferedWriter, IOException, InputStreamReader, OutputStreamWriter} import java.nio.charset.{Charset, StandardCharsets} import oharastream.ohara.client.filesystem.ftp.FtpFileSystem import oharastream.ohara.client.filesystem.hdfs.HdfsFileSystem import oharastream.ohara.client.filesystem.smb.SmbFileSystem import oharastream.ohara.common.exception.FileSystemException trait FileSystem extends oharastream.ohara.kafka.connector.storage.FileSystem { def readLines(path: String, encode: String = "UTF-8"): Array[String] = { val reader = new BufferedReader(new InputStreamReader(open(path), Charset.forName(encode))) try Iterator.continually(reader.readLine()).takeWhile(_ != null).toArray finally reader.close() } def wrap[T](f: () => T): T = try { f() } catch { case e: IOException => throw new FileSystemException(e.getMessage, e) case e: IllegalStateException => throw new FileSystemException(e.getMessage, e) } } object FileSystem { def hdfsBuilder: HdfsFileSystem.Builder = HdfsFileSystem.builder def ftpBuilder: FtpFileSystem.Builder = FtpFileSystem.builder def smbBuilder: SmbFileSystem.Builder = SmbFileSystem.builder }
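A closing sketch of the charset-aware helper above; the concrete file system and path are illustrative:

// hedged sketch: read a remote file as Big5-encoded lines
val fs: FileSystem = ??? // e.g. built via FileSystem.ftpBuilder / hdfsBuilder / smbBuilder
val lines: Array[String] = fs.readLines("/data/input.txt", "Big5")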