org.openjdk.jmh.infra.Blackhole Scala Examples

The following examples show how to use org.openjdk.jmh.infra.Blackhole. Vote up the examples you find helpful, vote down those you don't, and follow the links above each example to view the original project or source file.
Example 1
Source File: ScriptEstimatorBenchmark.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform.lang.v1

import java.util.concurrent.TimeUnit

import com.wavesplatform.lang.directives.values.V1
import com.wavesplatform.lang.utils
import com.wavesplatform.lang.v1.ScriptEstimatorBenchmark.St
import com.wavesplatform.lang.v1.estimator.v2.ScriptEstimatorV2
import monix.eval.Coeval
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole

@OutputTimeUnit(TimeUnit.NANOSECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Threads(4)
@Fork(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
class ScriptEstimatorBenchmark {
  @Benchmark
  def apply_test(st: St, bh: Blackhole): Unit = {
    // Run the V2 estimator over the pre-built expression; consuming the
    // result through the Blackhole prevents dead-code elimination.
    val estimation = ScriptEstimatorV2(Set.empty, st.functionCosts, st.expr)
    bh.consume(estimation)
  }
}

object ScriptEstimatorBenchmark {

  // Benchmark state. `BigSum` (defined elsewhere in this project) supplies
  // the `expr` being estimated; `functionCosts` maps each V1 built-in
  // function to its lazily-computed evaluation cost.
  // NOTE(review): no @State annotation is visible here — presumably BigSum
  // carries it; confirm against the BigSum definition.
  class St extends BigSum {
    val functionCosts: Map[FunctionHeader, Coeval[Long]] = utils.functionCosts(V1)
  }

}
Example 2
Source File: Decoding.scala    From avro4s   with Apache License 2.0 5 votes vote down vote up
package benchmarks

import java.io.ByteArrayOutputStream
import java.nio.ByteBuffer
import java.util.Collections

import benchmarks.record._
import com.sksamuel.avro4s._
import org.apache.avro.generic.{GenericDatumReader, GenericDatumWriter, GenericRecord}
import org.apache.avro.io.{DecoderFactory, EncoderFactory}
import org.apache.avro.util.ByteBufferInputStream
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole

object Decoding extends BenchmarkHelpers {
  // Per-thread state: pre-serialized payloads plus the decoder/reader pair
  // each benchmark variant needs, built once so the benchmarks measure only
  // decoding.
  @State(Scope.Thread)
  class Setup {
    // Payload produced by the Avro-generated SpecificRecord class.
    val avroBytes = {
      import benchmarks.record.generated.AttributeValue._
      import benchmarks.record.generated._
      new RecordWithUnionAndTypeField(new ValidInt(255, t)).toByteBuffer
    }

    // The same logical record, serialized through avro4s (see encode below).
    val avro4sBytes = encode(RecordWithUnionAndTypeField(AttributeValue.Valid[Int](255, t)))

    // Decoder/reader built from a hand-written codec for AttributeValue.
    // The implicit codec and schemaFor must be in scope before the
    // record-level SchemaFor/Decoder derivations below pick them up.
    val (handrolledDecoder, handrolledReader) = {
      import benchmarks.handrolled_codecs._
      implicit val codec: Codec[AttributeValue[Int]] = AttributeValueCodec[Int]
      implicit val schemaFor: SchemaFor[AttributeValue[Int]] = SchemaFor[AttributeValue[Int]](codec.schema)
      val recordSchemaFor = SchemaFor[RecordWithUnionAndTypeField]
      val decoder = Decoder[RecordWithUnionAndTypeField].withSchema(recordSchemaFor)
      val reader = new GenericDatumReader[GenericRecord](recordSchemaFor.schema)
      (decoder, reader)
    }

    // Decoder/reader derived entirely by avro4s.
    val (avro4sDecoder, avro4sReader) = {
      val decoder = Decoder[RecordWithUnionAndTypeField]
      val reader = new GenericDatumReader[GenericRecord](decoder.schema)
      (decoder, reader)
    }
  }

  // Serializes `value` with avro4s into a raw (header-less) Avro binary
  // buffer, as consumed by the decode benchmarks above.
  def encode[T: Encoder: SchemaFor](value: T): ByteBuffer = {
    val outputStream = new ByteArrayOutputStream(512)
    val encoder = Encoder[T]
    val schema = AvroSchema[T]
    val record = encoder.encode(value).asInstanceOf[GenericRecord]
    val writer = new GenericDatumWriter[GenericRecord](schema)
    val enc = EncoderFactory.get().directBinaryEncoder(outputStream, null)
    writer.write(record, enc)
    ByteBuffer.wrap(outputStream.toByteArray)
  }
}

class Decoding extends CommonParams with BenchmarkHelpers {

  import Decoding._

  /** Deserializes `bytes` into a `T`: binary-decode into a GenericRecord,
    * then map that record through the avro4s decoder.
    */
  def decode[T](bytes: ByteBuffer, decoder: Decoder[T], reader: GenericDatumReader[GenericRecord]): T = {
    val input     = new ByteBufferInputStream(Collections.singletonList(bytes.duplicate))
    val binaryDec = DecoderFactory.get().binaryDecoder(input, null)
    val generic   = reader.read(null, binaryDec)
    decoder.decode(generic)
  }

  // Baseline: deserialization via the Avro-generated SpecificRecord class.
  @Benchmark
  def avroSpecificRecord(setup: Setup, blackhole: Blackhole) = {
    import benchmarks.record.generated._
    blackhole.consume(RecordWithUnionAndTypeField.fromByteBuffer(setup.avroBytes.duplicate))
  }

  // avro4s with the hand-rolled AttributeValue codec.
  @Benchmark
  def avro4sHandrolled(setup: Setup, blackhole: Blackhole) =
    blackhole.consume(decode(setup.avro4sBytes, setup.handrolledDecoder, setup.handrolledReader))

  // avro4s with fully derived codecs.
  @Benchmark
  def avro4sGenerated(setup: Setup, blackhole: Blackhole) =
    blackhole.consume(decode(setup.avro4sBytes, setup.avro4sDecoder, setup.avro4sReader))
}
Example 3
Source File: TransformerBenchmark.scala    From featran   with Apache License 2.0 5 votes vote down vote up
package com.spotify.featran.jmh

import java.util.concurrent.TimeUnit

import com.spotify.featran.transformers._
import com.spotify.featran._
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole

@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Thread)
class TransformerBenchmark {

  import Fixtures._

  // Runs `transformer` over the implicitly supplied fixture through a full
  // featran extraction; a NoOpFeatureBuilder funnels every produced feature
  // into the Blackhole so only transformer work is measured.
  def benchmark[A](transformer: Transformer[A, _, _], bh: Blackhole)
                  (implicit fixture: Seq[A]): Seq[Unit] = {
    implicit val fb: FeatureBuilder[Unit] = new NoOpFeatureBuilder(bh)
    val fe = FeatureSpec.of[A].required(identity)(transformer).extract(fixture)
    fe.featureValues[Unit]
  }

  // TODO: figure out how to verify that all transformers are covered

  // One benchmark per transformer; each picks the matching fixture implicitly.
  @Benchmark def binarizer(bh: Blackhole): Seq[Unit] = benchmark(Binarizer("t"), bh)
  @Benchmark def bucketizer(bh: Blackhole): Seq[Unit] =
    benchmark(Bucketizer("t", Array(0.0, 250.0, 500.0, 750.0, 1000.0)), bh)
  @Benchmark def hashNHotEncoder(bh: Blackhole): Seq[Unit] = benchmark(HashNHotEncoder("t"), bh)
  @Benchmark def hashNHotWeightedEncoder(bh: Blackhole): Seq[Unit] =
    benchmark(HashNHotWeightedEncoder("t"), bh)
  @Benchmark def hashOneHotEncoder(bh: Blackhole): Seq[Unit] = benchmark(HashOneHotEncoder("t"), bh)
  @Benchmark def heavyHitters(bh: Blackhole): Seq[Unit] = benchmark(HeavyHitters("t", 100), bh)
  @Benchmark def identityB(bh: Blackhole): Seq[Unit] = benchmark(Identity("t"), bh)
  @Benchmark def maxAbsScaler(bh: Blackhole): Seq[Unit] = benchmark(MaxAbsScaler("t"), bh)
  @Benchmark def mdl(bh: Blackhole): Seq[Unit] = benchmark(MDL[String]("t"), bh)
  @Benchmark def minMaxScaler(bh: Blackhole): Seq[Unit] = benchmark(MinMaxScaler("t"), bh)
  @Benchmark def nGrams(bh: Blackhole): Seq[Unit] = benchmark(NGrams("t"), bh)
  @Benchmark def nHotEncoder(bh: Blackhole): Seq[Unit] = benchmark(NHotEncoder("t"), bh)
  @Benchmark def nHotWeightedEncoder(bh: Blackhole): Seq[Unit] =
    benchmark(NHotWeightedEncoder("t"), bh)
  @Benchmark def normalizer(bh: Blackhole): Seq[Unit] = benchmark(Normalizer("t"), bh)
  @Benchmark def oneHotEncoder(bh: Blackhole): Seq[Unit] = benchmark(OneHotEncoder("t"), bh)
  @Benchmark def polynomialExpansion(bh: Blackhole): Seq[Unit] =
    benchmark(PolynomialExpansion("t"), bh)
  @Benchmark def quantileDiscretizer(bh: Blackhole): Seq[Unit] =
    benchmark(QuantileDiscretizer("t"), bh)
  @Benchmark def standardScaler(bh: Blackhole): Seq[Unit] = benchmark(StandardScaler("t"), bh)
  @Benchmark def topNOneHotEncoder(bh: Blackhole): Seq[Unit] =
    benchmark(TopNOneHotEncoder("t", 100), bh)
  @Benchmark def vectorIdentity(bh: Blackhole): Seq[Unit] =
    benchmark(VectorIdentity[Array]("t"), bh)
  @Benchmark def vonMisesEvaluator(bh: Blackhole): Seq[Unit] =
    benchmark(VonMisesEvaluator("t", 100.0, 0.001, Array(1.0, 2.0, 3.0, 4.0, 5.0)), bh)

}

private object Fixtures {
  // 1000-element inputs, one per transformer input type, resolved implicitly
  // by TransformerBenchmark.benchmark.
  implicit val doubles: Seq[Double] = (0 until 1000).map(i => i.toDouble)
  implicit val labels: Seq[String] = (0 until 1000).map(i => s"l${i % 50}")
  implicit val mdlRecords: Seq[MDLRecord[String]] =
    (0 until 1000).map(i => MDLRecord((i % 3).toString, i.toDouble))
  implicit val nLabels: Seq[Seq[String]] =
    (0 until 1000).map(i => (0 to i % 50).map(j => s"l$j"))
  implicit val nWeightedLabels: Seq[Seq[WeightedLabel]] =
    nLabels.map(_.map(label => WeightedLabel(label, 1.0)))
  implicit val vectors: Seq[Array[Double]] = (0 until 1000).map(i => Array.fill(10)(i / 1000.0))
}

// FeatureBuilder that discards every value into a JMH Blackhole, so the
// benchmarks measure transformer work rather than feature assembly.
private class NoOpFeatureBuilder(val bh: Blackhole) extends FeatureBuilder[Unit] {
  override def init(dimension: Int): Unit = bh.consume(dimension)
  // Consume the unit value `()` instead of the `Unit` companion object:
  // `bh.consume(Unit)` passed the companion object itself (a deprecated
  // value use of `Unit`), which was never the intent.
  override def result: Unit = bh.consume(())
  override def add(name: String, value: Double): Unit = {
    bh.consume(name)
    bh.consume(value)
  }
  override def skip(): Unit = bh.consume(())
  override def newBuilder: FeatureBuilder[Unit] = new NoOpFeatureBuilder(bh)
}
Example 4
Source File: SmartNoSmartBenchmark.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform.serialization.protobuf

import java.util.concurrent.TimeUnit

import com.wavesplatform.account.{AddressScheme, PublicKey}
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.common.utils._
import com.wavesplatform.serialization.protobuf.SmartNoSmartBenchmark.ExchangeTransactionSt
import com.wavesplatform.transaction.assets.exchange._
import com.wavesplatform.transaction.{Proofs, TxVersion}
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole

//noinspection ScalaStyle
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Threads(1)
@Fork(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
class SmartNoSmartBenchmark {
  // Validated construction path: ExchangeTransaction.create runs the smart
  // checks and returns an Either, which explicitGet unwraps.
  @Benchmark
  def smartExchangeTX_test(st: ExchangeTransactionSt, bh: Blackhole): Unit = {
    import st._
    bh.consume(
      ExchangeTransaction
        .create(TxVersion.V2, buy, sell, 2, 5000000000L, 1, 1, 1, 1526992336241L, proofs)
        .explicitGet()
    )
  }

  // Unchecked construction path via the apply method, for comparison.
  @Benchmark
  def unsafeExchangeTX_test(st: ExchangeTransactionSt, bh: Blackhole): Unit = {
    import st._
    bh.consume(
      ExchangeTransaction(TxVersion.V2, buy, sell, 2, 5000000000L, 1, 1, 1, 1526992336241L, proofs, AddressScheme.current.chainId)
    )
  }
}

object SmartNoSmartBenchmark {
  // Fixture data shared by all benchmark threads: a matching buy/sell order
  // pair and the proofs used for the exchange transaction itself.
  @State(Scope.Benchmark)
  class ExchangeTransactionSt {
    // V2 buy order with a single proof.
    val buy = Order(
      TxVersion.V2,
      PublicKey.fromBase58String("BqeJY8CP3PeUDaByz57iRekVUGtLxoow4XxPvXfHynaZ").explicitGet(),
      PublicKey.fromBase58String("Fvk5DXmfyWVZqQVBowUBMwYtRAHDtdyZNNeRrwSjt6KP").explicitGet(),
      AssetPair.createAssetPair("WAVES", "9ZDWzK53XT5bixkmMwTJi2YzgxCqn5dUajXFcT2HcFDy").get,
      OrderType.BUY,
      2,
      6000000000L,
      1526992336241L,
      1529584336241L,
      1,
      proofs = Proofs(Seq(ByteStr.decodeBase58("2bkuGwECMFGyFqgoHV4q7GRRWBqYmBFWpYRkzgYANR4nN2twgrNaouRiZBqiK2RJzuo9NooB9iRiuZ4hypBbUQs").get))
    )

    // V1 sell order for the same asset pair.
    val sell = Order(
      TxVersion.V1,
      PublicKey.fromBase58String("7E9Za8v8aT6EyU1sX91CVK7tWUeAetnNYDxzKZsyjyKV").explicitGet(),
      PublicKey.fromBase58String("Fvk5DXmfyWVZqQVBowUBMwYtRAHDtdyZNNeRrwSjt6KP").explicitGet(),
      AssetPair.createAssetPair("WAVES", "9ZDWzK53XT5bixkmMwTJi2YzgxCqn5dUajXFcT2HcFDy").get,
      OrderType.SELL,
      3,
      5000000000L,
      1526992336241L,
      1529584336241L,
      2,
      proofs = Proofs(ByteStr.decodeBase58("2R6JfmNjEnbXAA6nt8YuCzSf1effDS4Wkz8owpCD9BdCNn864SnambTuwgLRYzzeP5CAsKHEviYKAJ2157vdr5Zq").get)
    )

    // Proofs for the exchange transaction built in the benchmarks.
    val proofs = Proofs(Seq(ByteStr.decodeBase58("5NxNhjMrrH5EWjSFnVnPbanpThic6fnNL48APVAkwq19y2FpQp4tNSqoAZgboC2ykUfqQs9suwBQj6wERmsWWNqa").get))
  }
}
Example 5
Source File: ProtoBufBenchmark.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform.serialization.protobuf

import java.util.concurrent.TimeUnit

import com.wavesplatform.account.PublicKey
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.protobuf.transaction.PBTransactions
import com.wavesplatform.transaction.Asset.Waves
import com.wavesplatform.transaction.Proofs
import com.wavesplatform.transaction.transfer.MassTransferTransaction
import com.wavesplatform.transaction.transfer.MassTransferTransaction.Transfer
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import com.wavesplatform.common.utils.EitherExt2

//noinspection ScalaStyle
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Threads(1)
@Fork(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
class ProtoBufBenchmark {

  /** Builds the two-transfer MassTransfer transaction both benchmarks
    * serialize. Kept as a `def` (not a cached `val`) so transaction
    * construction stays inside the measured region, exactly as in the
    * previous inlined version — this change only removes the verbatim
    * duplication of the construction code.
    */
  private def mkVanillaTx: MassTransferTransaction = {
    val transfers = MassTransferTransaction
      .parseTransfersList(
        List(Transfer("3N5GRqzDBhjVXnCn44baHcz2GoZy5qLxtTh", 100000000L), Transfer("3N5GRqzDBhjVXnCn44baHcz2GoZy5qLxtTh", 200000000L))
      )
      .explicitGet()

    MassTransferTransaction
      .create(
        1.toByte,
        PublicKey.fromBase58String("FM5ojNqW7e9cZ9zhPYGkpSP1Pcd8Z3e3MNKYVS5pGJ8Z").explicitGet(),
        Waves,
        transfers,
        200000,
        1518091313964L,
        ByteStr.decodeBase58("59QuUcqP6p").get,
        Proofs(Seq(ByteStr.decodeBase58("FXMNu3ecy5zBjn9b69VtpuYRwxjCbxdkZ3xZpLzB8ZeFDvcgTkmEDrD29wtGYRPtyLS3LPYrL2d5UM6TpFBMUGQ").get))
      )
      .explicitGet()
  }

  // Construction + protobuf conversion + protobuf byte serialization.
  @Benchmark
  def serializeMassTransferPB_test(bh: Blackhole): Unit = {
    val tx = PBTransactions.protobuf(mkVanillaTx)
    bh.consume(tx.toByteArray)
  }

  // Construction + the vanilla binary serialization of the same transaction.
  @Benchmark
  def serializeMassTransferVanilla_test(bh: Blackhole): Unit =
    bh.consume(mkVanillaTx.bytes())
}
Example 6
Source File: VerifierLoggerBenchmark.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform.transaction.smart

import java.io.BufferedWriter
import java.nio.file.{Files, Path, Paths}
import java.util.concurrent.TimeUnit

import cats.Id
import com.wavesplatform.account.KeyPair
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.common.utils._
import com.wavesplatform.lang.v1.compiler.Terms
import com.wavesplatform.lang.v1.compiler.Terms.{CONST_BOOLEAN, EVALUATED}
import com.wavesplatform.lang.v1.evaluator.Log
import com.wavesplatform.lang.v1.evaluator.ctx.impl.waves.Bindings
import com.wavesplatform.state.BinaryDataEntry
import com.wavesplatform.transaction.DataTransaction
import com.wavesplatform.transaction.smart.VerifierLoggerBenchmark.BigLog
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole

@OutputTimeUnit(TimeUnit.MILLISECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Threads(1)
@Fork(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
class VerifierLoggerBenchmark {

  @Benchmark
  def verifierLogged(bh: Blackhole, log: BigLog): Unit = {
    // Build the textual evaluation log for the fixture state and write it
    // out; feeding the write result to the Blackhole keeps the whole call
    // from being optimized away.
    val rendered = Verifier.buildLogs("id", log.value)
    bh.consume(log.writer.write(rendered))
  }
}

object VerifierLoggerBenchmark {

  // Benchmark state: a very large evaluation log plus a file writer the
  // benchmark streams it into.
  @State(Scope.Benchmark)
  class BigLog {

    val resultFile: Path       = Paths.get("log.txt")
    val writer: BufferedWriter = Files.newBufferedWriter(resultFile)

    private val dataTx: DataTransaction = DataTransaction
      .selfSigned(1.toByte, KeyPair(Array[Byte]()), (1 to 4).map(i => BinaryDataEntry(s"data$i", ByteStr(Array.fill(1024 * 30)(1)))).toList, 100000000, 0)
      .explicitGet()

    // NOTE(review): the `???` placeholders throw NotImplementedError as soon
    // as a BigLog is instantiated — this state cannot currently be built;
    // the real RealTransactionWrapper arguments need to be supplied.
    private val dataTxObj: Terms.CaseObj = Bindings.transactionObject(
      RealTransactionWrapper(dataTx, ???, ???, ???).explicitGet(),
      proofsEnabled = true
    )

    // 500 copies of the transaction object as log entries, plus a successful
    // final evaluation result.
    val value: (Log[Id], Either[String, EVALUATED]) =
      (
        List.fill(500)("txVal" -> Right(dataTxObj)),
        Right(CONST_BOOLEAN(true))
      )

    @TearDown
    def deleteFile(): Unit = {
      // Close (and thereby flush) the writer before deleting its file.
      // The previous order deleted first, which discards buffered output
      // and fails outright on platforms that forbid deleting open files.
      writer.close()
      Files.delete(resultFile)
    }
  }
}
Example 7
Source File: MerkleBenchmark.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform.transaction.merkle

import java.util.concurrent.TimeUnit

import com.wavesplatform.common.merkle.Merkle
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole

import scala.util.Random

//noinspection ScalaStyle
@State(Scope.Thread)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Threads(1)
@Fork(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
class MerkleBenchmark {

  // Fixture sizes: an even and an odd leaf count exercise both padding paths
  // of the Merkle tree construction.
  val messageSize = 2048
  val evenSize    = 2048
  val oddSize     = 2047

  // Random fixed-size leaf messages, generated once per benchmark thread.
  val messagesEven: Seq[Array[Byte]] = (0 until evenSize).map { _ =>
    val message = new Array[Byte](messageSize)
    Random.nextBytes(message)
    message
  }

  val messagesOdd: Seq[Array[Byte]] = (0 until oddSize).map { _ =>
    val message = new Array[Byte](messageSize)
    Random.nextBytes(message)
    message
  }

  // Pre-built trees, a random leaf index per tree, and the matching digest
  // and proof — so the proof/verify benchmarks measure only the operation
  // under test, not tree construction.
  val levelsEven: Seq[Seq[Array[Byte]]] = Merkle.mkLevels(messagesEven)
  val levelsOdd: Seq[Seq[Array[Byte]]]  = Merkle.mkLevels(messagesOdd)

  val messageIdxEven: Int            = Random.nextInt(evenSize)
  val messageIdxOdd: Int             = Random.nextInt(oddSize)
  val messageDigestEven: Array[Byte] = Merkle.hash(messagesEven(messageIdxEven))
  val messageDigestOdd: Array[Byte]  = Merkle.hash(messagesOdd(messageIdxOdd))
  val proofEven: Seq[Array[Byte]]    = Merkle.mkProofs(messageIdxEven, levelsEven)
  val proofOdd: Seq[Array[Byte]]     = Merkle.mkProofs(messageIdxOdd, levelsOdd)

  // Full tree construction over an even number of leaves.
  @Benchmark
  def merkleMkLevelsEven_test(bh: Blackhole): Unit = {
    val tree = Merkle.mkLevels(messagesEven)
    bh.consume(tree)
  }

  // Proof generation for a random leaf of the even tree.
  @Benchmark
  def merkleMkProofEven_test(bh: Blackhole): Unit = {
    val proof = Merkle.mkProofs(Random.nextInt(evenSize), levelsEven)
    bh.consume(proof)
  }

  // Proof verification against the even tree's root (levelsEven.head.head).
  @Benchmark
  def merkleVerifyProofEven_test(bh: Blackhole): Unit = {
    val result = Merkle.verify(messageDigestEven, messageIdxEven, proofEven, levelsEven.head.head)
    bh.consume(result)
  }

  // Same three operations over the odd-sized tree.
  @Benchmark
  def merkleMkLevelsOdd_test(bh: Blackhole): Unit = {
    val tree = Merkle.mkLevels(messagesOdd)
    bh.consume(tree)
  }

  @Benchmark
  def merkleMkProofOdd_test(bh: Blackhole): Unit = {
    val proof = Merkle.mkProofs(Random.nextInt(oddSize), levelsOdd)
    bh.consume(proof)
  }

  @Benchmark
  def merkleVerifyProofOdd_test(bh: Blackhole): Unit = {
    val result = Merkle.verify(messageDigestOdd, messageIdxOdd, proofOdd, levelsOdd.head.head)
    bh.consume(result)
  }
}
Example 8
Source File: CompilerBenchmark.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform.lang.v1

import java.util.concurrent.TimeUnit

import com.wavesplatform.common.utils.EitherExt2
import com.wavesplatform.lang.v1.CompilerBenchmark.St
import com.wavesplatform.lang.v1.estimator.v2.ScriptEstimatorV2
import com.wavesplatform.transaction.smart.script.ScriptCompiler
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole

@OutputTimeUnit(TimeUnit.NANOSECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Threads(1)
@Fork(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
class CompilerBenchmark {

  @Benchmark
  def serialize_test(st: St, bh: Blackhole): Unit = {
    // Compile the fixture script as an account (not asset) script and feed
    // the compiled script — the first element of the result — to the Blackhole.
    val compiled = ScriptCompiler(st.scriptString, isAssetScript = false, ScriptEstimatorV2).explicitGet()._1
    bh.consume(compiled)
  }
}

object CompilerBenchmark {

  @State(Scope.Benchmark)
  class St {
    // A 2-of-3 multisig script: each of the three listed keys contributes 1
    // if its signature matches the corresponding proof slot, and the script
    // passes when at least two signatures verify.
    val scriptString: String =
      """
        |let alicePubKey  = base58'B1Yz7fH1bJ2gVDjyJnuyKNTdMFARkKEpV'
        |let bobPubKey    = base58'7hghYeWtiekfebgAcuCg9ai2NXbRreNzc'
        |let cooperPubKey = base58'BVqYXrapgJP9atQccdBPAgJPwHDKkh6A8'
        |
        |let aliceSigned  = if(sigVerify(tx.bodyBytes, tx.proofs[0], alicePubKey  )) then 1 else 0
        |let bobSigned    = if(sigVerify(tx.bodyBytes, tx.proofs[1], bobPubKey    )) then 1 else 0
        |let cooperSigned = if(sigVerify(tx.bodyBytes, tx.proofs[2], cooperPubKey )) then 1 else 0
        |
        |aliceSigned + bobSigned + cooperSigned >= 2
      """.stripMargin
  }
}
Example 9
Source File: ListIndexOfBenchmark.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform.lang.v1

import java.util.concurrent.TimeUnit

import com.wavesplatform.common.utils.EitherExt2
import com.wavesplatform.lang.v1.EnvironmentFunctionsBenchmark.{curve25519, randomBytes}
import com.wavesplatform.lang.v1.ListIndexOfBenchmark.{CurveSt32k, ListIndexOfSt}
import com.wavesplatform.lang.v1.compiler.Terms.{CONST_STRING, EVALUATED}
import com.wavesplatform.lang.v1.evaluator.ctx.impl.PureContext
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import scorex.crypto.signatures.{Curve25519, Signature}

@OutputTimeUnit(TimeUnit.MICROSECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Threads(1)
@Fork(1)
@Warmup(iterations = 30)
@Measurement(iterations = 30)
class ListIndexOfBenchmark {
  // Worst-case indexOf: searching for the largest-size element among
  // elements that are maximally expensive to compare.
  @Benchmark
  def indexOfMaxSizeElementInMaxCmpWeightElementsList(st: ListIndexOfSt, bh: Blackhole): Unit =
    bh.consume(st.indexOf(st.listWithMaxCmpWeightElements, st.maxSizeElementToFound))

  // Converse case: heaviest-compare needle over a list of maximum-size elements.
  @Benchmark
  def indexOfMaxCmpWeightElementInMaxSizeElementsList(st: ListIndexOfSt, bh: Blackhole): Unit =
    bh.consume(st.indexOf(st.listWithMaxSizeElements, st.maxCmpWeightElementToFound))

  // Same two cases for lastIndexOf (scans from the end).
  @Benchmark
  def lastIndexOfMaxSizeElementInMaxCmpWeightElementsList(st: ListIndexOfSt, bh: Blackhole): Unit =
    bh.consume(st.lastIndexOf(st.listWithMaxCmpWeightElements, st.maxSizeElementToFound))

  @Benchmark
  def lastIndexOfMaxCmpWeightElementInMaxSizeElementsList(st: ListIndexOfSt, bh: Blackhole): Unit =
    bh.consume(st.lastIndexOf(st.listWithMaxSizeElements, st.maxCmpWeightElementToFound))

  // Reference point: a 32 KiB Curve25519 signature verification.
  @Benchmark
  def sigVerify32Kb(st: CurveSt32k, bh: Blackhole): Unit =
    bh.consume(Curve25519.verify(Signature @@ st.signature, st.message, st.publicKey))
}

object ListIndexOfBenchmark {
  @State(Scope.Benchmark)
  class ListIndexOfSt {
    // Needles: one at the comparison-weight limit, one at ~150 KiB size.
    val maxCmpWeightElementToFound   = CONST_STRING("a" * ContractLimits.MaxCmpWeight.toInt).explicitGet()
    val maxSizeElementToFound        = CONST_STRING("a" * 150 * 1024).explicitGet()
    // Haystacks: 1000 near-matches that differ only in the final character,
    // forcing each comparison to scan the whole string.
    val listWithMaxCmpWeightElements = IndexedSeq.fill(1000)(CONST_STRING("a" * (ContractLimits.MaxCmpWeight.toInt - 1) + "b").explicitGet())
    val listWithMaxSizeElements      = IndexedSeq.fill(1000)(CONST_STRING(("a" * (150 * 1024 - 1)) + "b").explicitGet())

    // Thin wrappers over PureContext's generic index lookup, parameterized
    // with the forward and backward scan functions of the list.
    def indexOf(list: Seq[EVALUATED], element: EVALUATED): Either[String, EVALUATED] =
      PureContext.genericListIndexOf(element, list.indexOf, list.indexWhere)

    def lastIndexOf(list: Seq[EVALUATED], element: EVALUATED): Either[String, EVALUATED] =
      PureContext.genericListIndexOf(element, list.lastIndexOf(_), list.lastIndexWhere)
  }

  // Fixture for the sigVerify32Kb reference benchmark: a 32 KiB message
  // signed with a fresh Curve25519 key pair.
  @State(Scope.Benchmark)
  class CurveSt32k {
    val (privateKey, publicKey) = curve25519.generateKeypair
    val message                 = randomBytes(32 * 1024)
    val signature               = curve25519.sign(privateKey, message)
  }
}
Example 10
Source File: LangSerdeBenchmark.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform.lang.v1

import java.util.concurrent.TimeUnit

import com.wavesplatform.common.utils.EitherExt2
import com.wavesplatform.lang.v1.LangSerdeBenchmark.St
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole

@OutputTimeUnit(TimeUnit.NANOSECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Threads(4)
@Fork(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
class LangSerdeBenchmark {

  // Serialization of the pre-built BigSum expression.
  @Benchmark
  def serialize_test(st: St, bh: Blackhole): Unit = {
    val bytes = Serde.serialize(st.expr)
    bh.consume(bytes)
  }

  // Deserialization of the pre-serialized byte form of the same expression.
  @Benchmark
  def deserialize_test(st: St, bh: Blackhole): Unit = {
    val parsed = Serde.deserialize(st.serializedExpr).explicitGet()
    bh.consume(parsed)
  }

}

object LangSerdeBenchmark {

  // Benchmark state: BigSum (defined elsewhere) supplies `expr`; the byte
  // form is computed once so deserialize_test measures only deserialization.
  @State(Scope.Benchmark)
  class St extends BigSum {
    val serializedExpr: Array[Byte] = Serde.serialize(expr)
  }

}
Example 11
Source File: ListMinMaxBenchmark.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform.lang.v1

import java.util.concurrent.TimeUnit

import com.wavesplatform.lang.v1.EnvironmentFunctionsBenchmark.{curve25519, randomBytes}
import com.wavesplatform.lang.v1.ListIndexOfBenchmark.CurveSt32k
import com.wavesplatform.lang.v1.ListMinMaxBenchmark.ListMinMaxSt
import com.wavesplatform.lang.v1.evaluator.ctx.impl.PureContext
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import scorex.crypto.signatures.{Curve25519, Signature}

@OutputTimeUnit(TimeUnit.MICROSECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Threads(1)
@Fork(1)
@Warmup(iterations = 30)
@Measurement(iterations = 30)
class ListMinMaxBenchmark {
  // max/min over a maximum-length list of Longs.
  @Benchmark
  def max(st: ListMinMaxSt, bh: Blackhole): Unit =
    bh.consume(st.list.max)

  @Benchmark
  def min(st: ListMinMaxSt, bh: Blackhole): Unit =
    bh.consume(st.list.min)

  // Reference point: a 32 KiB Curve25519 signature verification.
  @Benchmark
  def sigVerify32Kb(st: CurveSt32k, bh: Blackhole): Unit =
    bh.consume(Curve25519.verify(Signature @@ st.signature, st.message, st.publicKey))
}

object ListMinMaxBenchmark {
  @State(Scope.Benchmark)
  class ListMinMaxSt {
    // A consecutive run of Longs at the V4 maximum list length, starting
    // from Long.MinValue.
    val list = (Long.MinValue to Long.MinValue + PureContext.MaxListLengthV4).toVector
  }

  // Fixture for the sigVerify32Kb reference benchmark: a 32 KiB message
  // signed with a fresh Curve25519 key pair.
  @State(Scope.Benchmark)
  class CurveSt32k {
    val (privateKey, publicKey) = curve25519.generateKeypair
    val message                 = randomBytes(32 * 1024)
    val signature               = curve25519.sign(privateKey, message)
  }
}
Example 12
Source File: Encoding.scala    From avro4s   with Apache License 2.0 5 votes vote down vote up
package benchmarks

import java.io.ByteArrayOutputStream
import java.nio.ByteBuffer

import benchmarks.record._
import com.sksamuel.avro4s._
import org.apache.avro.generic.{GenericDatumWriter, GenericRecord}
import org.apache.avro.io.EncoderFactory
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole

object Encoding extends BenchmarkHelpers {

  // Per-thread state: the record to serialize (in both avro4s and generated
  // SpecificRecord form) and the encoder/writer pair each variant needs.
  @State(Scope.Thread)
  class Setup {
    val record = RecordWithUnionAndTypeField(AttributeValue.Valid[Int](255, t))

    // Equivalent record built from the Avro-generated classes.
    val specificRecord = {
      import benchmarks.record.generated.AttributeValue._
      import benchmarks.record.generated._
      new RecordWithUnionAndTypeField(new ValidInt(255, t))
    }

    // Encoder/writer derived entirely by avro4s.
    val (avro4sEncoder, avro4sWriter) = {
      val schema = AvroSchema[RecordWithUnionAndTypeField]
      val encoder = Encoder[RecordWithUnionAndTypeField]
      val writer = new GenericDatumWriter[GenericRecord](schema)
      (encoder, writer)
    }

    // Encoder/writer built on the hand-written AttributeValue codec; the
    // implicits must be in scope before the record-level derivations below.
    val (handrolledEncoder, handrolledWriter) = {
      import benchmarks.handrolled_codecs._
      implicit val codec: AttributeValueCodec[Int] = AttributeValueCodec[Int]
      implicit val schemaForValid = codec.schemaForValid
      val schema = AvroSchema[RecordWithUnionAndTypeField]
      val encoder = Encoder[RecordWithUnionAndTypeField]
      val writer = new GenericDatumWriter[GenericRecord](schema)
      (encoder, writer)
    }

  }
}

class Encoding extends CommonParams with BenchmarkHelpers {

  import Encoding._

  /** Serializes `value` to raw Avro binary: encode into a GenericRecord,
    * then stream it through `writer` into a fresh buffer.
    */
  def encode[T](value: T, encoder: Encoder[T], writer: GenericDatumWriter[GenericRecord]): ByteBuffer = {
    val out       = new ByteArrayOutputStream(512)
    val generic   = encoder.encode(value).asInstanceOf[GenericRecord]
    val binaryEnc = EncoderFactory.get().directBinaryEncoder(out, null)
    writer.write(generic, binaryEnc)
    ByteBuffer.wrap(out.toByteArray)
  }

  // Baseline: serialization via the Avro-generated SpecificRecord class.
  @Benchmark
  def avroSpecificRecord(setup: Setup, blackhole: Blackhole) =
    blackhole.consume(setup.specificRecord.toByteBuffer)

  // avro4s with fully derived codecs.
  @Benchmark
  def avro4sGenerated(setup: Setup, blackhole: Blackhole) =
    blackhole.consume(encode(setup.record, setup.avro4sEncoder, setup.avro4sWriter))

  // avro4s with the hand-rolled AttributeValue codec.
  @Benchmark
  def avro4sHandrolled(setup: Setup, blackhole: Blackhole) =
    blackhole.consume(encode(setup.record, setup.handrolledEncoder, setup.handrolledWriter))
}
Example 13
Source File: MakeStringBenchmark.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform.lang.v1

import java.util.concurrent.TimeUnit

import MakeStringBenchmark.{CurveSt32k, MakeStringSt}
import com.wavesplatform.lang.v1.EnvironmentFunctionsBenchmark.{curve25519, randomBytes}
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import scorex.crypto.signatures.{Curve25519, Signature}

@OutputTimeUnit(TimeUnit.MICROSECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Threads(1)
@Fork(1)
@Warmup(iterations = 30)
@Measurement(iterations = 30)
class MakeStringBenchmark {

  // Joining 1000 strings with a comma separator.
  @Benchmark
  def makeString(st: MakeStringSt, bh: Blackhole): Unit =
    bh.consume(st.stringList.mkString(","))

  // Reference point: a 32 KiB Curve25519 signature verification.
  @Benchmark
  def sigVerify32Kb(st: CurveSt32k, bh: Blackhole): Unit =
    bh.consume(Curve25519.verify(Signature @@ st.signature, st.message, st.publicKey))
}

object MakeStringBenchmark {
  @State(Scope.Benchmark)
  class MakeStringSt {
    // 1000 strings of 33 'a' characters each, joined by the benchmark.
    val stringList = List.tabulate(1000)(_ => "a" * 33)
  }

  // Fixture for the sigVerify32Kb reference benchmark: a 32 KiB message
  // signed with a fresh Curve25519 key pair.
  @State(Scope.Benchmark)
  class CurveSt32k {
    val (privateKey, publicKey) = curve25519.generateKeypair
    val message                 = randomBytes(32 * 1024)
    val signature               = curve25519.sign(privateKey, message)
  }
}
Example 14
Source File: AddressBenchmark.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform.common

import java.util.concurrent.TimeUnit

import com.wavesplatform.account.{Address, PublicKey}
import com.wavesplatform.common.AddressBenchmark.{CachedAddress, PublicKeySt, UncachedAddress}
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole

import scala.util.Random

@OutputTimeUnit(TimeUnit.MICROSECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Threads(4)
@Fork(1)
@Warmup(iterations = 5)
@Measurement(iterations = 5)
class AddressBenchmark {
  // Cost of deriving an address from a public key a single time.
  @Benchmark
  def createAddress_test(st: PublicKeySt, bh: Blackhole): Unit =
    bh.consume(new UncachedAddress(st.publicKey).toAddress)

  // 1001 reads where every access re-derives the address (plain def).
  @Benchmark
  def readAddress_test(st: PublicKeySt, bh: Blackhole): Unit = {
    val address = new UncachedAddress(st.publicKey)
    for (_ <- 0 to 1000) bh.consume(address.toAddress)
  }

  // 1001 reads where the address is derived once and memoized (lazy val).
  @Benchmark
  def readCachedAddress_test(st: PublicKeySt, bh: Blackhole): Unit = {
    val address = new CachedAddress(st.publicKey)
    for (_ <- 0 to 1000) bh.consume(address.toAddress)
  }
}

object AddressBenchmark {
  // Shared state: a random public key of the curve's key length.
  @State(Scope.Benchmark)
  class PublicKeySt {
    val publicKey = new Array[Byte](scorex.crypto.signatures.Curve25519.KeyLength25519)
    Random.nextBytes(publicKey)
  }

  // Derives the address on every call (def).
  class UncachedAddress(publicKey: Array[Byte]) {
    def toAddress = Address.fromPublicKey(PublicKey(publicKey))
  }

  // Derives the address once on first access and memoizes it (lazy val).
  class CachedAddress(publicKey: Array[Byte]) {
    lazy val toAddress = Address.fromPublicKey(PublicKey(publicKey))
  }
}
Example 15
Source File: ZkSnarkBenchmark.scala    From Waves   with MIT License 5 votes vote down vote up
package com.wavesplatform.common

import java.util.Base64
import java.util.concurrent.TimeUnit

import com.wavesplatform.common.ZkSnarkBenchmark.{CurveSt, Groth16St}
import com.wavesplatform.lang.v1.EnvironmentFunctionsBenchmark.{curve25519, randomBytes}
import com.wavesplatform.zwaves.bls12.Groth16
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import scorex.crypto.signatures.{Curve25519, Signature}

@OutputTimeUnit(TimeUnit.MICROSECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Threads(1)
@Fork(1)
@Warmup(iterations = 10)
@Measurement(iterations = 20)
class ZkSnarkBenchmark {

  /** Groth16 zk-SNARK proof verification over the pre-decoded fixtures. */
  @Benchmark
  def groth16(st: Groth16St, bh: Blackhole): Unit = {
    val verified = Groth16.verify(st.vk, st.proof, st.inputs)
    bh.consume(verified)
  }

  /** Curve25519 signature verification, kept as a cost baseline for comparison. */
  @Benchmark
  def sigVerify(st: CurveSt, bh: Blackhole): Unit = {
    val verified = Curve25519.verify(Signature @@ st.signature, st.message, st.publicKey)
    bh.consume(verified)
  }
}

object ZkSnarkBenchmark {

  /** Groth16 fixtures: verifying key, proof and public inputs, pre-decoded from
    * Base64 in state setup so decoding cost stays out of the measurement. */
  @State(Scope.Benchmark)
  class Groth16St {
    val vk: Array[Byte] = Base64.getDecoder.decode("s6Fw3I8k6rDlE1BJcs833aBONDLHtvx2AWD92/r2qWWc05SGxm+beILW+G2KrB4QpmeojJCTEDqiFAZ4tCkLGppmaiBNRunPepzNtXY+1TPiqRsDdpHP86U7lgng2OAEADUDAFAVbNLRMNzYHbDf7LRppGh2xiQILqHz0OqLU9dcS9vuNQVuVYIQqcUGoObYkPiXh4gEjeojZ/ZHYzBgRMzlqI2cyNPR1uFYHYBgrUtOcOoIueQqyvgii9NynelqEJHSYXFkhefEbegIXinjA9lHKkFuhkHRPW1lqIU7uMofmLTOEON7XyjTZf7HvJ0IoNU368JBygoz97xgjmTGe2R+F2M+tQjnA0qSNV4ve9/RyOmUZLIbvHPnC+HUCFnwGFuJF0LLkNL+LizhD+nRa6uBFgOGNKJo88XwRIjAC+KZOCP3nrxgegS4m/bRxmG6o6a03AlETTySuenJqp79DS7pTiBLmmKi0qCnOMyeiC5N25n4wKkCPDqUxeDfYBlDlRiGpRh8Lt/jHyJAAMdaUQK/bbla1wfBBSkq+WIqgzQRpFAOlP20GgbLlYitIeEpigMdNI7yna6gF/H/yj5AyoyctmX0JaRGs0JMbJXH5LSQjrnds41/8O/EoJzIVvGJt8MBfhtjM8XqRymtkjvo0c7N5PHw3mcVcJqQ5+GMytQ/IhIi7SrIqlesrpbWkG0koDcKMhIZM/EqXWQQApIp2B0w0LyJOjeRe3vg6R08QOJmc/2OiquIX2+3wo9wgmwzk6XX2gc8LF8qWr4m7Kk6qt2OIk2tLZK+2FR7l1+AkGEJ9rAh3KZ01rmTRRQk7BdXkNtxldeVfqs5CH7Tik8jGPEzpq06Aqh56GeG8+JZ+0MQpnidx2WwcNP/RwNQ2K0eiWrcvf2b8Zwq7fan2EmPIckcsQ4TDtcUYlZ/jtv8oQ8AbYVbjxCsb2+ANMbsiGfKojIKcDUqtiWCKA0A15oYvJ1+ypYRFgVFV4W9J0hTDNOAULv4Ir5pjtESEnbipEYilmSIuVIxoxBAQfGdYLfn7ktLcwpTBglFWQD40MGpY52ZWhOuQdGAhb2hiYHY8LLaqEpQKPlE6XjDbMkF32NoyNWLaJaankwoP0dKhxPec1cUp8DmzBDEzA/r7ct6e1TkkjFUNVdbmrPjaH4oywuOYrBjJl4LqS6sn0YtDMfXnDMxbj9hHjyiCvvJzCZoQF4Usz+nxwys9J/ltRjeGofKgQoYD8c6vyib5Zep3swXIP96yRJ4EY9VLx4ZHrKiXmbBkoOsZdZuOScTRqxr2eWXlRZsydm//A7HROZx0LYll/UbASK4RIz5biDG2Z1AIg2bjfCCXX069KgUsnTsrwVlx3XhF/FFje42YP447PvcnEx8vWaXYMIo9nABOOKdZlOipw8mq+/bn1H53vUUYxmGghiJ+cCSNMPLrX8DLKYOL9x5dDcpt2MJWZ7mjQ+lTtUFoNvV8lzQXncyobLubjPaKeGlLA2vPRnmqQSYNZqp+/J6Z7dtIt1btoW30jt0OM8D")
    val proof: Array[Byte] = Base64.getDecoder.decode("tKrzmvZK5lnOj3xbe/3x1Yu8aqPxMnPOawFPM0JWDBH+WDFKBUwlToXaefpY7gkxg14eUONY5rJI3dj6abI7b6gZtgyVP7Xr1HXtMpDX9i5xs4kgXxCcCZ+1ZNox311LEJLQoctW/Qi042T0t1FQ9ZSWUlyZkSEb/l8fC8akq2oEDIFICs+PmuzGdgDuuHndlV09I+bY5hgNhRV1UvteD3W8m7Q0vGO/W/milDXu6u65gch97W2Wwwjj0Ags+j5l")
    val inputs: Array[Byte] = Base64.getDecoder.decode("GCddXmFUIRlXriGVQ1A3t6cwg0w4lmWXGqI+X3mMbmNiO51k5CvrWHd5GOwVb5eb5Imcjj55Q6oKQptRSZqe9TY//vzQ+VNG5bkLiCpTgphYwzXH/GFowK7AjCvy8YhQbFQ1V8sPbpjH8rYlxveMdHW41maMYoW6sJuHpoz1RnNkAyGKRQ0sRUD1n/ohaE/LVLnB4F3cN3cea3MdTmPgPGIWCE5a6oeQRGI/RpKH6kY2BMeuta4jrizVRrcIeFvtYu+j4v31CSmSxiCTHx2SqF/QcSJGnkkdSj9eIZkiO+VjqFi39hRbsfsMYCDUZ8P8QhrYWb+6cFErJ/1PLPjOKFkgBG5pxLo5ifZGjTL5kbtBD6kKritHOHRuANqarO40IiS1yffpaJVrXyJDy0rn2K79XQhyO85tWfK6oT1Q4GwYgxGI9rYyfuKaqrYUbIWPoDCwc9lmSYF9klmjn7LyNDymCC9mmzCiTz4ggzlrR/6DhmJGoDon3wZ5N0jhuCvBPog54HohNv84S1zZLtzEbmlRawFa/q1sl/h5Fl+9KQYyN3SWGN0YIOd3eqtu80UBL8YkOZs8BL6Fgb6KXGULo1VkAHR9q+RSurwCvMqpk/nPzFyZ4MviHxSAmSOUNN2cXzYONrxHZYXGRid1/YxUSukQ57OtwTXsUVIeYG/HTUI=")
  }

  /** Merkle-proof fixtures; not referenced by the benchmarks in this file's
    * `ZkSnarkBenchmark` class (presumably kept for a related benchmark). */
  @State(Scope.Benchmark)
  class MerkleSt {
    val root0: Array[Byte] = Base64.getDecoder.decode("YK6g9RXHtw3vE3zmjCGCdLqrij2BwvPCO0E8Bm977ks=")
    // FIX(review): this literal was split across two physical lines in the extracted
    // source (invalid Scala); the two fragments are re-joined into one literal here.
    val proof0: Array[Byte] = Base64.getDecoder.decode("UU2z52vRH3FTCVwi/C7IIOFz40tZih3P1EarEoM2vsJXWjwtRvpdGjsQl5p/vr1IivKe7cYSdXYs3FcV2R3C2EdPc0IguQwK6bUbVXN1n12KfHPSNfp/V7v9nOWkNiTeA6kMim4Mlj3goV1XuHK9ubsawXZSuDi0HzYaWi5VmxZa4bAzJ4EbgH0sjapX75LRvwCJulQBTpfNKDwi0wG9tybgUmLm53jkOE0DwQaSXJhhY3VsvV1mWW+wQWS1hWjjcS4wHwHwhSb4iFsdV2IQKAD5D8jcBhYj1+yJPITGvKId5FHNHJjkkWFtOMS5BSwz1g5H4Eq1Io/0WVrFBQUk4wQIHxgOMwWRLy6J2RS4vJ5QahhUQZ/OdH3j5z4cMvYqLRaCf+usdX2aZFadMqCDG9FsoLrTwVyW06rB9ToYzhwSrg2XKBzdwwzCE7TXHXnD2Y9bkBty9ZvfSd4htJYFoRa3WJEM8VaOA05xwc0E4GLqTdv8ZC8iDuTQOybnvcRWOD9sWXPdTRunh5XPFwnFSEKCxsWP+D8z44ExsqHH8/IlHHxqhsXgvjP2vI8vJXc5NJHP9mx5Gx6g7xkx5of+bQpa8JJ8ad60KyR88UlAMtn0bZdt5Sdtu5WIb+dCYmYGNNiow56PBg+c6Vs6Oo8sCCsSoK8nXGplshzdBnOmL1YZy5jC13EDORWuYk1PRe1aDLzKzDs1VstRjDp2DWe43SXT+B4DIm4qzc/wElMkfG2QNwel3G75HuxHSxPCa0aLAdPlfFtiAp2Z+Obh5CRXoxZvFUkifIaZ9v/o3H2i6udcJ3y2E05NFCYkjmRHuYYG8/Ktry89FE+S5hKnRuopCgMsbtdafxKW/5qKMnqVNQlcIO4lxqOBW0n2wFm+hhaWXS4+xEl6TxrDyonRz8l3hA3OPrfNVwXSNGqFsBrM9dUpj1l7CcMQ1F2E0XrQoRpPepBvVK22iAjWznSS8HDwSwKtw82edGHgqSGZ2cIS5ebjT6K+RIIJheFH2Sc9zq2MPuhuh+kceQZtCuo0yB8MOlw9ILLfv4BUN+Cf0HXziMQFcMAMd0DDySspFqir0DAzXTz1rIYxQmkVGFOgRDn23ksNt8WxJZV+LorP/vPlSuwtG6XACWsJhWHwNkbgpp0cWdts/QnavsdFhzUwgZq9DLjqHAhz/y/fApNkky4Z51JonZ/lqemNehkT0CygII4iTrkVHZJ7ROUjUguhevoz8wqprf514DNsdYp85PS1j7NSnjoiMl/vdW9PETXxpg95cu7CprJcHE1cfxy/fxFG9jfY6KCw89Nq+mkGwNtwn0YVxyAKDRKimJJ62dK96S3JuVX7Ko9sbYF/VQcHN+VQ+lgWjSUadtDMrVNToNjSbopb0HJ5t6rIdWoX2fQVyLRBCpNpSX5sE0ITadD22N3A3Qyo0hTnjZQldD8ZFVHLlywFCg0K+hiI6ndBXwMcmidciV2Q/DKuOGPjDj9R5GwPllIMafePMeY0GK+yhqcAs3cxezhMIfV1W47DBAkbJr1wUtfxmcH7SaWk1coJam8Gd5m9P5a14khPVn3L+X7eZQMfbUzoVUx9RB4Qc1SGQug5Cyw38iinB6okVp8ZYgkm1R5wGMEbl3+26vRzWhbJkTxN3LLRfu2Bn5JVowFQLj0PbqGSZTJ1E7TIFpDfVd7HOwiDxAD2VsE8Segu5WVLdx9bxPT8r+KhkuuWtq2Mt6ii4R40z30oQ6tEB6IA5DLnelAmFImgKSa7IHAy0cog9A5Wowyd/6s4Pvifi1sjk/8UX4zX15cRdKIwIfdLzWA1viVAiQG/meoNFWr/npHWvg8nEiFLeD86vjQbHK9YEtKRZXTV+4NpnlrsZNpj9Odt8mcx9MuUSrgggRXDWz+xHAdVR28XoyBC0u5Xgm9KQ9ZVXSh9wW3flHA+BZgwJhgILEJKt27L+tJ2z0eFiy1I7Tcosf8CyGonXzg8Nu1cn5iB0YpqIGAGWdsyhIYS5OSWyxpCEGRj/t31sg02jNo8nvsi8c+gpKQmeRVar463OZil")
    val index = 23
    val elements1: Array[Byte] = Base64.getDecoder.decode("GrfbyxFPhfTsam3aY8yqeY072ZrT3DTO8SrSRg4nJZ9nF1OpFPuvXQlcbsqFrGUkgUlokbCealGEw0J8G/H2oWFOY3CHDDhqBQGzUzk+/R3uYljv0YS/Wnb41IKeDSzfDcrJ41FdxBXgptFlh+TM3OKlgJ2jSAF9mqE3v7dUD/1n6conMUMPB+yeP5fapHBGu2OtlDmiHjzuGG6xrrW7tW45mNToh8yTb+POgZP+IvCmf7b8Tzzs0Z9fv998Q5HdXwrH6ts1cC9GBn9GwlWiDxfbKHKE+XS86tNoGPje5MFE7fWtv5XKzkGirbKRuKsBrLDrwl4UwaruqMwk1jJ4jTnsYzLpaGW7nZ46g+Mx5THu8481Kl7zWTFyRXVLIlvCc3eI3oqsjglbnCZ/7xgG4mnlDWqKYBuWzkmm6pCXB0JV7p+/1G7KcT7SIYoWUdf/XPedxh3N3Qgp3xhyh2VpcAMiK9JrMi8j4EQEsxE9Qm58z8aZ+fGBDkEJPaX0TJ1mDQvC4jQH6Sw96KjRLwezomn2Y5rtXcsxqX5UEhuYk39cxPBUX1tC5ap9qn0IPJaQ1Aj7tZvKhm4H1z3zAdwILTzjXYFOE2NyGZub0DY0g16XZKzhOCrtyeSauxm2pRzjNOFKLyEnk8/8a4bAXfODxGA4tCoBlv8ixFGa1agonvVPMc9FP6/S/26YEL491LGZsnqQ5AvvPt1oe/TmcfK/P1AxgsO2chZPqwddisQNRinb8x5NE8ptEWh9He7uutE/LeAGQVN5BYNpEMijP1RDj1FQkDFgglvvh6rgqm7sZr4lh/8wInwzJMMVTN9Dw1uvRHI5Igir1gcvs0vbuqhAf1NRcY5tnD6PxSTl79M1SkTpg1MSmpYJcISdoFNmfq0LVRUyp47f6gjWt+qVkyJAe3xp0GaEmaMKvvj2xVxLdxRRlRRTYABMFjU63LBttYjn0e1qkAvv1JveIrreATBq5Q==")
  }

  /** Baseline state: a random 150 KiB message signed with Curve25519. */
  @State(Scope.Benchmark)
  class CurveSt {
    val (privateKey, publicKey) = curve25519.generateKeypair
    val message                 = randomBytes(150 * 1024)
    val signature               = curve25519.sign(privateKey, message)
  }
}
Example 16
Source File: ArrayCompareBenchmark.scala — from the Waves project (MIT License)
package com.wavesplatform.common

import java.util.concurrent.TimeUnit

import com.wavesplatform.common.ArrayCompareBenchmark.BytesSt
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole

import scala.util.Random

//noinspection ScalaStyle
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@BenchmarkMode(Array(Mode.Throughput))
@Threads(4)
@Fork(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
class ArrayCompareBenchmark {

  /** Element-wise comparison through Scala's `sameElements`. */
  @Benchmark
  def sameElements_test(st: BytesSt, bh: Blackhole): Unit = {
    val equal = st.bytes.sameElements(st.bytes1)
    bh.consume(equal)
  }

  /** Comparison through `java.util.Arrays.equals`. */
  @Benchmark
  def arraysEquals_test(st: BytesSt, bh: Blackhole): Unit = {
    val equal = java.util.Arrays.equals(st.bytes, st.bytes1)
    bh.consume(equal)
  }
}

object ArrayCompareBenchmark {
  /** Two 1 KiB arrays with identical contents, so both comparison strategies
    * must scan the full length (worst case) instead of bailing out early. */
  @State(Scope.Benchmark)
  class BytesSt {
    val bytes: Array[Byte]  = new Array[Byte](1024)
    val bytes1: Array[Byte] = new Array[Byte](1024)

    Random.nextBytes(bytes)
    // Duplicate the random contents into the second array.
    System.arraycopy(bytes, 0, bytes1, 0, bytes.length)
  }
}
Example 17
Source File: EcrecoverBenchmark.scala — from the Waves project (MIT License)
package com.wavesplatform.common

import java.util.concurrent.TimeUnit

import com.wavesplatform.common.EcrecoverBenchmark.{CurveSt32k, EcrecoverSt1, EcrecoverSt2}
import com.wavesplatform.lang.Global
import com.wavesplatform.lang.v1.EnvironmentFunctionsBenchmark.{curve25519, randomBytes}
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import scorex.util.encode.Base16
import scorex.crypto.hash.Keccak256
import scorex.crypto.signatures.{Curve25519, Signature}

@OutputTimeUnit(TimeUnit.MICROSECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Threads(1)
@Fork(1)
@Warmup(iterations = 30)
@Measurement(iterations = 30)
class EcrecoverBenchmark {

  // NOTE(review): "ecrover*" looks like a typo for "ecrecover*", but JMH method
  // names identify benchmark results, so they are kept unchanged for continuity.
  @Benchmark
  def ecrover1(st: EcrecoverSt1, bh: Blackhole): Unit = {
    val recovered = Global.ecrecover(st.messageHash, st.signature)
    bh.consume(recovered)
  }

  @Benchmark
  def ecrover2(st: EcrecoverSt2, bh: Blackhole): Unit = {
    val recovered = Global.ecrecover(st.messageHash, st.signature)
    bh.consume(recovered)
  }

  /** Curve25519 verification over a 32 KiB message, as a cost baseline. */
  @Benchmark
  def sigVerify32Kb(st: CurveSt32k, bh: Blackhole): Unit = {
    val verified = Curve25519.verify(Signature @@ st.signature, st.message, st.publicKey)
    bh.consume(verified)
  }
}

object EcrecoverBenchmark {

  /** First recovery fixture: fixed hex signature over an Ethereum-style
    * personal-sign message ("\u0019Ethereum Signed Message:\n" prefix). */
  @State(Scope.Benchmark)
  class EcrecoverSt1 {
    // .get is safe here: the literal is known-valid hex.
    val signature = Base16.decode(
      "848ffb6a07e7ce335a2bfe373f1c17573eac320f658ea8cf07426544f2203e9d52dbba4584b0b6c0ed5333d84074002878082aa938fdf68c43367946b2f615d01b"
    ).get
    val message     = "i am the owner"
    val prefix      = s"\u0019Ethereum Signed Message:\n${message.length}"
    val messageHash = Keccak256.hash(s"$prefix$message".getBytes)
  }

  /** Second recovery fixture, same construction with a different message. */
  @State(Scope.Benchmark)
  class EcrecoverSt2 {
    // .get is safe here: the literal is known-valid hex.
    val signature = Base16.decode(
      "3b163bbd90556272b57c35d1185b46824f8e16ca229bdb36f8dfd5eaaee9420723ef7bc3a6c0236568217aa990617cf292b1bef1e7d1d936fb2faef3d846c5751b"
    ).get
    val message     = "what's up jim"
    val prefix      = s"\u0019Ethereum Signed Message:\n${message.length}"
    val messageHash = Keccak256.hash(s"$prefix$message".getBytes)
  }

  /** Baseline state: a random 32 KiB message signed with Curve25519. */
  @State(Scope.Benchmark)
  class CurveSt32k {
    val (privateKey, publicKey) = curve25519.generateKeypair
    val message                 = randomBytes(32 * 1024)
    val signature               = curve25519.sign(privateKey, message)
  }
}
Example 18
Source File: SponsorshipMathBenchmark.scala — from the Waves project (MIT License)
package com.wavesplatform.common
import java.util.concurrent.TimeUnit

import com.wavesplatform.state.diffs.FeeValidation
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole

@OutputTimeUnit(TimeUnit.MILLISECONDS)
@BenchmarkMode(Array(Mode.Throughput))
@Threads(4)
@Fork(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
class SponsorshipMathBenchmark {

  /** Sponsored-fee conversion implemented with BigDecimal arithmetic,
    * with an explicit overflow check against Long.MaxValue. */
  @Benchmark
  def bigDecimal_test(bh: Blackhole): Unit = {
    def toWaves(assetFee: Long, sponsorship: Long): Long = {
      val converted = (BigDecimal(assetFee) * BigDecimal(FeeValidation.FeeUnit)) / BigDecimal(sponsorship)
      if (converted > Long.MaxValue) throw new java.lang.ArithmeticException("Overflow")
      converted.toLong
    }

    bh.consume(toWaves(100000, 100000000))
  }

  /** Same conversion with BigInt; overflow surfaces via `longValueExact`. */
  @Benchmark
  def bigInt_test(bh: Blackhole): Unit = {
    def toWaves(assetFee: Long, sponsorship: Long): Long = {
      val converted = BigInt(assetFee) * FeeValidation.FeeUnit / sponsorship
      converted.bigInteger.longValueExact()
    }

    bh.consume(toWaves(100000, 100000000))
  }
}
Example 19
Source File: Base58Benchmark.scala — from the Waves project (MIT License)
package com.wavesplatform.common

import java.util.concurrent.{ThreadLocalRandom, TimeUnit}

import com.wavesplatform.common.Base58Benchmark.{Base58St, BytesSt}
import com.wavesplatform.common.utils.{Base58, FastBase58, StdBase58}
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole

@OutputTimeUnit(TimeUnit.SECONDS)
@BenchmarkMode(Array(Mode.Throughput))
@Threads(4)
@Fork(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
class Base58Benchmark {

  /** Encoding with the optimised implementation. */
  @Benchmark
  def base58_fastEncode_test(st: BytesSt, bh: Blackhole): Unit = {
    val encoded = FastBase58.encode(st.bytes)
    bh.consume(encoded)
  }

  /** Encoding with the reference implementation. */
  @Benchmark
  def base58_encode_test(st: BytesSt, bh: Blackhole): Unit = {
    val encoded = StdBase58.encode(st.bytes)
    bh.consume(encoded)
  }

  /** Decoding with the reference implementation. */
  @Benchmark
  def base58_decode_test(st: Base58St, bh: Blackhole): Unit = {
    val decoded = StdBase58.decode(st.base58)
    bh.consume(decoded)
  }

  /** Decoding with the optimised implementation. */
  @Benchmark
  def base58_fastDecode_test(st: Base58St, bh: Blackhole): Unit = {
    val decoded = FastBase58.decode(st.base58)
    bh.consume(decoded)
  }
}

object Base58Benchmark {

  /** Returns `length` random bytes from the calling thread's local RNG. */
  def randomBytes(length: Int): Array[Byte] = {
    val buf = new Array[Byte](length)
    ThreadLocalRandom.current().nextBytes(buf)
    buf
  }

  /** 10 000 random bytes, the payload for the encode benchmarks. */
  @State(Scope.Benchmark)
  class BytesSt {
    val bytes: Array[Byte] = randomBytes(10000)
  }

  /** The same payload pre-encoded to Base58, for the decode benchmarks. */
  @State(Scope.Benchmark)
  class Base58St extends BytesSt {
    val base58 = Base58.encode(bytes)
  }
}
Example 20
Source File: ProtocolBenchArr.scala — from the laserdisc project (MIT License)
package laserdisc
package protocol

import org.openjdk.jmh.annotations.{Benchmark, Scope, State}
import org.openjdk.jmh.infra.Blackhole
import shapeless._

@State(Scope.Benchmark)
// Benchmarks decoding of RESP array (`Arr`) responses into various Scala
// collection shapes via laserdisc's Protocol machinery. Fixture responses are
// deliberately verbatim so decode cost is measured over realistic payloads.
class ProtocolBenchArr {
  // Minimal case class decoded from an Arr: non-Bulk elements map to "".
  final case class A(s: List[String])
  implicit val ev1: Arr ==> A =
    Read.infallible { arr =>
      A(arr.elements.map {
        case Bulk(s) => s
        case _ => ""
      })
    }

  // One Protocol per target decode shape; all share the same command/request.
  private final def protocol = Protocol("CUSTOM", _: String :: HNil).as[Arr, Seq[Long]]
  private final def protocolArr = Protocol("CUSTOM", _: String :: HNil).as[Arr, Seq[A]]
  private final def protocolWithNull = Protocol("CUSTOM", _: String :: HNil).as[Arr, Seq[Option[Long]]]
  private final def protocolArrWithNull = Protocol("CUSTOM", _: String :: HNil).as[Arr, Seq[Option[A]]]
  private final def protocolPairs = Protocol("CUSTOM", _: String :: HNil).as[Arr, Seq[(String, Long)]]

  private final val request  = "id" :: HNil

  // response1: flat Arr of 40 numeric Bulks. response2: same with NullBulk holes.
  private final val response1 = Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"))
  private final val response2 = Arr(Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"))
  // response3: Arr of 20 nested Arrs, each the same 40-element numeric payload.
  private final val response3 = Arr(
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"))
  )
  // response4: nested Arrs with NullBulk holes, decoded as Seq[Option[...]].
  private final val response4 = Arr(
    Arr(Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20")),
    Arr(Bulk("2"), Bulk("1"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk, Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), NullBulk, Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), NullBulk, Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20")),
    Arr(Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk, Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20")),
    Arr(Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk, Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk),
    Arr(Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), NullBulk, Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), NullBulk, Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk, Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk),
    Arr(Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk, Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk, Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk, Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk)
  )
  // response5: alternating key/value Bulks, decoded as Seq[(String, Long)].
  private final val response5 = Arr(Bulk("abcd-1"), Bulk("1"), Bulk("abcd-2"), Bulk("2"), Bulk("abcd-3"), Bulk("3"), Bulk("abcd-4"), Bulk("4"), Bulk("abcd-5"), Bulk("5"), Bulk("abcd-6"), Bulk("6"), Bulk("abcd-7"), Bulk("7"), Bulk("abcd-8"), Bulk("8"), Bulk("abcd-9"), Bulk("9"), Bulk("abcd-10"), Bulk("10"), Bulk("abcd-11"), Bulk("11"), Bulk("abcd-12"), Bulk("12"), Bulk("abcd-13"), Bulk("13"), Bulk("abcd-14"), Bulk("14"), Bulk("abcd-15"), Bulk("15"), Bulk("abcd-16"), Bulk("16"), Bulk("abcd-17"), Bulk("17"), Bulk("abcd-18"), Bulk("18"), Bulk("abcd-19"), Bulk("19"), Bulk("abcd-20"), Bulk("20"), Bulk("abcd-1"), Bulk("1"), Bulk("abcd-2"), Bulk("2"), Bulk("abcd-3"), Bulk("3"), Bulk("abcd-4"), Bulk("4"), Bulk("abcd-5"), Bulk("5"), Bulk("abcd-6"), Bulk("6"), Bulk("abcd-7"), Bulk("7"), Bulk("abcd-8"), Bulk("8"), Bulk("abcd-9"), Bulk("9"), Bulk("abcd-10"), Bulk("10"), Bulk("abcd-11"), Bulk("11"), Bulk("abcd-12"), Bulk("12"), Bulk("abcd-13"), Bulk("13"), Bulk("abcd-14"), Bulk("14"), Bulk("abcd-15"), Bulk("15"), Bulk("abcd-16"), Bulk("16"), Bulk("abcd-17"), Bulk("17"), Bulk("abcd-18"), Bulk("18"), Bulk("abcd-19"), Bulk("19"), Bulk("abcd-20"), Bulk("20"))

  // Decoding an empty Arr: fixed-overhead baseline for the measurements below.
  @Benchmark def decodeArrBaseline(bh: Blackhole) = {
    val decoded = protocol(request).decode(Arr(Nil))
    bh.consume(decoded)
  }

  // Flat Arr of Bulks -> Seq[Long].
  @Benchmark def decodeArrOfBulk(bh: Blackhole) = {
    val decoded = protocol(request).decode(response1)
    bh.consume(decoded)
  }
  // Flat Arr with NullBulk holes -> Seq[Option[Long]].
  @Benchmark def decodeArrOfBulkWithNull(bh: Blackhole) = {
    val decoded = protocolWithNull(request).decode(response2)
    bh.consume(decoded)
  }
  // Nested Arrs -> Seq[Long] (via the Seq[Long] protocol on nested input).
  @Benchmark def decodeArrOfArrOfBulk(bh: Blackhole) = {
    val decoded = protocol(request).decode(response3)
    bh.consume(decoded)
  }
  // Nested Arrs with NullBulk holes -> Seq[Option[Long]].
  @Benchmark def decodeArrOfArrOfBulkWithNull(bh: Blackhole) = {
    val decoded = protocolWithNull(request).decode(response4)
    bh.consume(decoded)
  }
  // Alternating key/value Bulks -> Seq[(String, Long)].
  @Benchmark def decodeArrOfPairs(bh: Blackhole) = {
    val decoded = protocolPairs(request).decode(response5)
    bh.consume(decoded)
  }
}
Example 21
Source File: RESPFrameBench.scala — from the laserdisc project (MIT License)
package laserdisc
package protocol

import org.openjdk.jmh.annotations.{Benchmark, Scope, State}
import scodec.bits.BitVector
import eu.timepit.refined.types.string.NonEmptyString
import java.nio.ByteBuffer

import laserdisc.RESPFrameFixture
import org.openjdk.jmh.infra.Blackhole

@State(Scope.Benchmark)
class RESPFrameBench extends RESPFrameFixture {

  // raw bytes for the three protocol mixes under test
  val mixedNoArr = bytesOf(mixedNoArrList)
  val arrOneLevel = bytesOf(arrOneLevelList)
  val arrFiveLevels = bytesOf(arrFiveLevelsList)

  // whole-message buffers: each benchmark below feeds one complete frame in a single append
  val empty = BitVector.empty.toByteBuffer
  val mixedNoArrFull = BitVector(mixedNoArr).toByteBuffer
  val arrOneLevelFull = BitVector(arrOneLevel).toByteBuffer
  val arrFiveLevelsFull = BitVector(arrFiveLevels).toByteBuffer

  /** Baseline: appending an empty buffer measures pure framework overhead. */
  @Benchmark def frameOfFullBaseline(bh: Blackhole) =
    bh.consume(EmptyFrame.append(empty))

  @Benchmark def frameOfMixedNoArrFull(bh: Blackhole) =
    bh.consume(EmptyFrame.append(mixedNoArrFull))

  @Benchmark def frameOfMixedArrOneLevelFull(bh: Blackhole) =
    bh.consume(EmptyFrame.append(arrOneLevelFull))

  @Benchmark def frameOfMixedArrFiveLevelsFull(bh: Blackhole) =
    bh.consume(EmptyFrame.append(arrFiveLevelsFull))

  // the same payloads split into short 128-byte chunks, appended one at a time
  val mixedNoArrSmallChunkBuffers    = groupInChunks(mixedNoArr, 128)
  val arrOneLevelSmallChunkBuffers   = groupInChunks(arrOneLevel, 128)
  val arrFiveLevelsSmallChunkBuffers = groupInChunks(arrFiveLevels, 128)

  /** Baseline for the chunked variants: an empty chunk iterator. */
  @Benchmark def frameOfChunkedBaseline(bh: Blackhole) =
    bh.consume(appendChunks(Iterator.empty[BitVector]))

  @Benchmark def frameOfChunkedShortMixedNoArr(bh: Blackhole) =
    bh.consume(appendChunks(mixedNoArrSmallChunkBuffers))

  @Benchmark def frameOfChunkedShortArrOneLevel(bh: Blackhole) =
    bh.consume(appendChunks(arrOneLevelSmallChunkBuffers))

  @Benchmark def frameOfChunkedShortArrFiveLevels(bh: Blackhole) =
    bh.consume(appendChunks(arrFiveLevelsSmallChunkBuffers))

  // same payloads in coarser 1 KiB chunks
  val mixedNoArrBigChunkBuffers    = groupInChunks(mixedNoArr, 1024)
  val arrOneLevelBigChunkBuffers   = groupInChunks(arrOneLevel, 1024)
  val arrFiveLevelsBigChunkBuffers = groupInChunks(arrFiveLevels, 1024)

  @Benchmark def frameOfChunkedLongMixedNoArr(bh: Blackhole) =
    bh.consume(appendChunks(mixedNoArrBigChunkBuffers))

  @Benchmark def frameOfChunkedLongArrOneLevel(bh: Blackhole) =
    bh.consume(appendChunks(arrOneLevelBigChunkBuffers))

  @Benchmark def frameOfChunkedLongArrFiveLevels(bh: Blackhole) =
    bh.consume(appendChunks(arrFiveLevelsBigChunkBuffers))
}
Example 22
Source File: SerializedDataTxBenchmark.scala    From EncryCore   with GNU General Public License v3.0 5 votes vote down vote up
package benches

import java.util.concurrent.TimeUnit

import benches.SerializedDataTxBenchmark.SerializedDataBenchState
import benches.Utils._
import encryBenchmark.{BenchSettings, Settings}
import org.encryfoundation.common.modifiers.mempool.transaction.{Transaction, TransactionSerializer}
import org.encryfoundation.common.modifiers.state.box.AssetBox
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import org.openjdk.jmh.profile.GCProfiler
import org.openjdk.jmh.runner.{Runner, RunnerException}
import org.openjdk.jmh.runner.options.{OptionsBuilder, TimeValue, VerboseMode}

class SerializedDataTxBenchmark {

  /** Parses every pre-serialized data transaction back into a Transaction. */
  @Benchmark
  def deserializeDataTransactionsBench(stateBench: SerializedDataBenchState, bh: Blackhole): Unit = {
    val parsed = stateBench.serializedTransactions.map(bytes => TransactionSerializer.parseBytes(bytes))
    bh.consume(parsed)
  }

  /** Serializes every prepared data transaction to its byte representation. */
  @Benchmark
  def serializeDataTransactionsBench(stateBench: SerializedDataBenchState, bh: Blackhole): Unit = {
    val serialized = stateBench.initialTransactions.map(_.bytes)
    bh.consume(serialized)
  }
}

object SerializedDataTxBenchmark extends BenchSettings {

  /** Standalone JMH entry point: runs every benchmark declared in SerializedDataTxBenchmark. */
  @throws[RunnerException]
  def main(args: Array[String]): Unit = {
    val runnerOptions = new OptionsBuilder()
      .include(".*" + classOf[SerializedDataTxBenchmark].getSimpleName + ".*")
      .forks(1)
      .threads(1)
      .mode(Mode.AverageTime)
      .timeUnit(TimeUnit.SECONDS)
      .verbosity(VerboseMode.EXTRA)
      .addProfiler(classOf[GCProfiler])
      .warmupIterations(benchSettings.benchesSettings.warmUpIterations)
      .warmupTime(TimeValue.milliseconds(benchSettings.benchesSettings.warmUpTime))
      .measurementIterations(benchSettings.benchesSettings.measurementIterations)
      .measurementTime(TimeValue.milliseconds(benchSettings.benchesSettings.measurementTime))
      .build()
    new Runner(runnerOptions).run()
  }

  /** Shared fixture: boxes, the data transactions built from them, and their serialized bytes. */
  @State(Scope.Benchmark)
  class SerializedDataBenchState {

    // populated once in createStateForBenchmark and then only read by the benchmarks
    var initialBoxes: IndexedSeq[AssetBox] = IndexedSeq.empty[AssetBox]
    var initialTransactions: IndexedSeq[Transaction] = IndexedSeq.empty[Transaction]
    var serializedTransactions: IndexedSeq[Array[Byte]] = IndexedSeq.empty[Array[Byte]]

    /** Builds the fixture up front so setup cost is excluded from the measured time. */
    @Setup
    def createStateForBenchmark(): Unit = {
      initialBoxes = generateInitialBoxes(benchSettings.serializedDataBenchSettings.totalBoxesNumber)
      initialTransactions = generateDataTransactions(
        initialBoxes,
        benchSettings.serializedDataBenchSettings.numberOfInputs,
        benchSettings.serializedDataBenchSettings.numberOfOutputs,
        benchSettings.serializedDataBenchSettings.bytesQty
      )
      serializedTransactions = initialTransactions.map(_.bytes)
    }
  }
}
Example 23
Source File: OrderBookCancelBenchmark.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform.dex.model.orderbook

import java.util.concurrent.{ThreadLocalRandom, TimeUnit}

import com.wavesplatform.dex.domain.order.Order
import com.wavesplatform.dex.model.OrderBook
import com.wavesplatform.dex.model.orderbook.OrderBookCancelBenchmark._
import com.wavesplatform.dex.model.state.OrderBookBenchmarkState
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import org.scalacheck.Gen

import scala.util.Random

@OutputTimeUnit(TimeUnit.NANOSECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Threads(4)
@Fork(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
class OrderBookCancelBenchmark {
  // NOTE(review): the 2500 -> 1250 case is disabled; its state class still exists in the companion.
//  @Benchmark def cancel_2500_to_1250_test(st: Cancel_2500_To_1250_State, bh: Blackhole): Unit = bh.consume { st.run() }
  /** Cancels all 1250 orders of a pre-built order book. */
  @Benchmark def cancel_1250_to_0_test(st: Cancel_1250_To_0_State, bh: Blackhole): Unit = bh.consume(st.run())
}

object OrderBookCancelBenchmark {

  // Two cancellation scenarios: halve a 2500-order book, and fully drain a 1250-order book.
  @State(Scope.Thread) class Cancel_2500_To_1250_State extends CancelState(initOrderNumber = 2500, orderNumberAfterCancel = 1250)
  @State(Scope.Thread) class Cancel_1250_To_0_State    extends CancelState(initOrderNumber = 1250, orderNumberAfterCancel = 0)

  // Pre-builds an order book of `initOrderNumber` orders and picks a random subset of
  // order ids to cancel so that `orderNumberAfterCancel` orders remain after run().
  sealed abstract class CancelState(initOrderNumber: Int, orderNumberAfterCancel: Int) extends OrderBookBenchmarkState {
    // ask and bid price bands are disjoint (asks 1000-2000, bids 1-999), so the sides never cross
    private val askPricesMin = 1000L * Order.PriceConstant
    private val askPricesMax = 2000L * Order.PriceConstant

    private val bidPricesMin = 1L * Order.PriceConstant
    private val bidPricesMax = 999L * Order.PriceConstant

    // half of the orders per side, two orders per price level
    val orderBookGen: Gen[OrderBook] = fixedSidesOrdersGen(
      levelNumber = initOrderNumber / 2,
      orderNumberInLevel = 2,
      askPricesGen = Gen.choose(askPricesMin, askPricesMax),
      bidPricesGen = Gen.choose(bidPricesMin, bidPricesMax)
    ).map(Function.tupled(mkOrderBook))

    // NOTE(review): `sample.get` can in principle return None for a failing generator — assumed safe here.
    val orderBook: OrderBook = orderBookGen.sample.get
    // random subset of order ids to cancel; shuffled so cancellation order is not price-biased
    val orders: Seq[Order.Id] = {
      val xs = orderBook.allOrders.map(_.order.id()).toVector
      new Random(ThreadLocalRandom.current()).shuffle(xs).take(initOrderNumber - orderNumberAfterCancel)
    }

    // Cancels each selected order in turn, threading the updated book through the fold.
    def run(): OrderBook = orders.foldLeft(orderBook) { case (r, id) => r.cancel(id, ts)._1 }
  }

}
Example 24
Source File: Model.scala    From scala-concurrency-playground   with MIT License 5 votes vote down vote up
package org.zalando.benchmarks

import akka.actor.ActorSystem
import akka.pattern.after
import org.openjdk.jmh.infra.Blackhole

import scala.concurrent.Future
import scala.util.Random

// One unit of work. `payload` (16000 random ints, ~64 KB) gives each job realistic
// memory weight; it is a body member, not a constructor parameter, so it does not
// participate in case-class equality.
case class Job(id: Int) {
  val payload = Array.fill(16000)(Random.nextInt())
}
// Result of computing a single job.
case class JobResult(job: Job, result: Int)
// Result of publishing a single job result.
case class PublishResult(result: JobResult)

object Computer {
  import ComputationFollowedByAsyncPublishing._

  /** Simulates CPU-bound work for one job, then pairs the job with its (trivial) result. */
  def compute(job: Job): JobResult = {
    // JMH's Blackhole guarantees this spin really burns CPU and cannot be optimized away
    Blackhole.consumeCPU(numTokensToConsume)
    JobResult(job, job.id)
  }
}

object Publisher {
  import ComputationFollowedByAsyncPublishing._

  // we use the scheduler and the dispatcher of the actor system here because it's so very convenient
  // Simulates asynchronous publishing: the returned Future completes after `publishDuration`
  // by scheduling the wrapped computation on the actor system's dispatcher.
  def publish(result: JobResult, system: ActorSystem): Future[PublishResult] =
    after(publishDuration, system.scheduler) {
      Future(PublishResult(result))(system.dispatcher)
    } (system.dispatcher)
}
Example 25
Source File: SetCreateBench.scala    From abc   with Apache License 2.0 5 votes vote down vote up
package com.rklaehn.abc

import java.util.concurrent.TimeUnit

import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
 import cats.kernel.instances.all._

import scala.collection.immutable.{HashSet, SortedSet}

// Contract for SetCreateBench: build a set in one bulk call vs. element-by-element.
// Note that ScalaCollectionBench below backs both methods with the same bulk constructor.
sealed trait SetCreateBenchOps {
  def createBulk: Any
  def createElements: Any
}

object SetCreateBenchOps extends BenchUtil {

  /** Builds the benchmark ops for the requested collection `kind` over the mixed (hashed) input. */
  def apply(a: Seq[Int], kind: String) = {
    val mixed = a.map(mix).toArray
    require(mixed.length == a.length)
    kind match {
      case "hashset"   => ScalaCollectionBench(mixed, xs => HashSet(xs: _*))
      case "sortedset" => ScalaCollectionBench(mixed, xs => SortedSet(xs: _*))
      case "arrayset"  => TypeClassBench(mixed)
      case "arrayset2" => ??? // intentionally unimplemented; "arrayset2" is commented out of the @Param list
    }
  }

  // wraps a standard-library collection constructor; both creation styles use the same bulk call
  private final case class ScalaCollectionBench(a: Array[Int], f: Array[Int] => Any) extends SetCreateBenchOps {
    def createBulk: Any = f(a)
    def createElements: Any = f(a)
  }

  // ArraySet: bulk varargs construction vs. repeated single-element insertion
  private final case class TypeClassBench(a: Array[Int]) extends SetCreateBenchOps {
    def createBulk: Any = ArraySet(a: _*)
    def createElements: Any = a.foldLeft(ArraySet.empty[Int])(_ + _)
  }
}

@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Thread)
class SetCreateBench {

  // number of elements to put in the set
  @Param(Array("1", "10", "100", "1000", "10000", "100000"))
  var size = 0

  // which set implementation to benchmark
  @Param(Array("arrayset", "hashset", "sortedset")) //, "arrayset2"))
  var kind = ""

  var bench: SetCreateBenchOps = _

  /** Builds the ops for the current (size, kind) combination before measurement. */
  @Setup
  def setup(): Unit =
    bench = SetCreateBenchOps(0 until size, kind)

  /** Construction via a single bulk call. */
  @Benchmark
  def createBulk(x: Blackhole): Unit = x.consume(bench.createBulk)

  /** Construction by inserting one element at a time. */
  @Benchmark
  def createElements(x: Blackhole): Unit = x.consume(bench.createElements)
}
Example 26
Source File: SetElementBench.scala    From abc   with Apache License 2.0 5 votes vote down vote up
package com.rklaehn.abc

import java.util.concurrent.TimeUnit

import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
 import cats.kernel.instances.all._
import ScalaCollectionConverters._

import scala.collection.immutable.{HashSet, SortedSet}

// Contract for SetElementBench: membership checks for an element known to be in the
// set (`containsTrue`) and one known to be absent (`containsFalse`).
sealed trait SetElementBenchOps {
  def containsTrue: Any
  def containsFalse: Any
}

object SetElementBenchOps extends BenchUtil {

  /** Builds ops for `kind`; `c` is an element inside the set, `n` one outside it. */
  def apply(a: Seq[Int], c: Int, n: Int, kind: String) = {
    val mixed = a.map(mix)
    val hit = mix(c)
    val miss = mix(n)
    require(mixed.length == a.length)
    kind match {
      case "hashset"   => ScalaCollectionBench(HashSet(mixed: _*), hit, miss)
      case "sortedset" => ScalaCollectionBench(SortedSet(mixed: _*), hit, miss)
      case "arrayset"  => TypeClassBench(ArraySet(mixed: _*), hit, miss)
      case "arrayset2" => ScalaCollectionBench(ArraySet(mixed: _*).asCollection, hit, miss)
    }
  }

  // membership via the standard scala.collection.Set interface
  private final case class ScalaCollectionBench(a: Set[Int], c: Int, n: Int) extends SetElementBenchOps {
    def containsTrue: Boolean = a.contains(c)
    def containsFalse: Boolean = a.contains(n)
  }

  // membership via ArraySet's own contains
  private final case class TypeClassBench(a: ArraySet[Int], c: Int, n: Int) extends SetElementBenchOps {
    def containsTrue: Boolean = a.contains(c)
    def containsFalse: Boolean = a.contains(n)
  }
}

@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Thread)
class SetElementBench {

  // size of the set under test
  @Param(Array("1", "10", "100", "1000", "10000", "100000"))
  var size = 0

  // which set implementation to benchmark
  @Param(Array("arrayset", "hashset", "sortedset")) //, "arrayset2"))
  var kind = ""

  // NOTE(review): `k` is never read in this class — looks like a leftover from SetSetBench.
  var k: Int = 0
  var bench: SetElementBenchOps = _

  /** Prepares one element inside and one outside the set before measurement. */
  @Setup
  def setup(): Unit = {
    val present = (0.3 * size).toInt // a value that is contained in the set
    val absent = (1.3 * size).toInt  // a value that is not contained in the set
    bench = SetElementBenchOps(0 until size, present, absent, kind)
  }

  /** Lookup of an absent element. */
  @Benchmark
  def containsFalse(x: Blackhole): Unit = x.consume(bench.containsFalse)

  /** Lookup of a present element. */
  @Benchmark
  def containsTrue(x: Blackhole): Unit = x.consume(bench.containsTrue)
}
Example 27
Source File: SetSetBench.scala    From abc   with Apache License 2.0 5 votes vote down vote up
package com.rklaehn.abc

import java.util.concurrent.TimeUnit

import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
 import cats.kernel.instances.all._
import ScalaCollectionConverters._

import scala.collection.immutable.{HashSet, SortedSet}

// Contract for SetSetBench: the binary set operations (plus filter) measured
// against each collection implementation.
sealed trait SetSetBenchOps {
  def union: Any
  def intersect: Any
  def diff: Any
  def subsetOf: Boolean
  def filter(f: Int => Boolean): Any
}

object SetSetBenchOps extends BenchUtil {

  /** Builds both operand sets (hashed via `mix`) for the requested collection `kind`. */
  def apply(a: Seq[Int], b: Seq[Int], kind: String) = {
    val left = a.map(mix)
    val right = b.map(mix)
    kind match {
      case "hashset"   => ScalaCollectionBench(HashSet(left: _*), HashSet(right: _*))
      case "sortedset" => ScalaCollectionBench(SortedSet(left: _*), SortedSet(right: _*))
      case "arrayset"  => TypeClassBench(ArraySet(left: _*), ArraySet(right: _*))
      case "arrayset2" => ScalaCollectionBench(ArraySet(left: _*).asCollection, ArraySet(right: _*).asCollection)
    }
  }

  // operations via the standard scala.collection.Set interface
  private final case class ScalaCollectionBench(a: Set[Int], b: Set[Int]) extends SetSetBenchOps {
    def union: Any = a.union(b)
    def diff: Any = a.diff(b)
    def subsetOf: Boolean = a.subsetOf(b)
    def intersect: Any = a.intersect(b)
    def filter(f: Int => Boolean): Any = a.filter(f)
  }

  // operations via ArraySet's own implementations
  private final case class TypeClassBench(a: ArraySet[Int], b: ArraySet[Int]) extends SetSetBenchOps {
    def union: Any = a.union(b)
    def diff: Any = a.diff(b)
    def subsetOf: Boolean = a.subsetOf(b)
    def intersect: Any = a.intersect(b)
    def filter(f: Int => Boolean): Any = a.filter(f)
  }
}

@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Thread)
class SetSetBench {

  // size of each operand set
  @Param(Array("1", "10", "100", "1000", "10000", "100000"))
  var size = 0

  // relative displacement of the second set: 0.0 -> identical ranges, 1.0 -> disjoint ranges
  @Param(Array("0.0", "0.5", "1.0"))
//  @Param(Array("0.5"))
  var offset = 0.0

  // which set implementation to benchmark
  @Param(Array("arrayset", "hashset", "sortedset")) //, "arrayset2"))
  var kind = ""

  var k: Int = 0
  var bench: SetSetBenchOps = _

  val shift = 1000000 // so we don't get the cached java.lang.Integer instances

  /** Builds the two operand sets [shift, shift+size) and [shift+k, shift+k+size). */
  @Setup
  def setup(): Unit = {
    k = (offset * size).toInt
    bench = SetSetBenchOps(shift until (shift + size), (shift + k) until (shift + k + size), kind)
  }

  @Benchmark
  def union(x: Blackhole): Unit = x.consume(bench.union)

  @Benchmark
  def intersect(x: Blackhole): Unit = x.consume(bench.intersect)

  @Benchmark
  def diff(x: Blackhole): Unit = x.consume(bench.diff)

  @Benchmark
  def subsetOf(x: Blackhole): Unit = x.consume(bench.subsetOf)

  // filter keeps roughly the overlapping portion of the first set
  @Benchmark
  def filter(x: Blackhole): Unit = x.consume(bench.filter(_ < k + shift))
}
Example 28
Source File: SonicReducerBench.scala    From abc   with Apache License 2.0 5 votes vote down vote up
package com.rklaehn.abc

import java.util.concurrent.TimeUnit

import com.rklaehn.sonicreducer.Reducer
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole

@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@State(Scope.Thread)
class SonicReducerBench {

  // number of string fragments ("0", "1", ...) to concatenate
  @Param(Array("1", "10", "100", "1000", "10000"))
  var size = 0

  var data: Array[String] = null

  /** Builds the input fragments once, outside the measured region. */
  @Setup
  def setup(): Unit = {
    data = (0 until size).map(_.toString).toArray
  }

  /** Naive left fold with `+`: allocates a new string per step (quadratic total work). */
  def foldLeft(): String = {
    data.foldLeft("")(_ + _)
  }

  /** Hierarchical reduction over the array viewed as an Iterable. */
  def reduceIterable(): String = {
    Reducer.reduce(data)(_ + _).get
  }

  /** Hierarchical reduction specialized to arrays. */
  def reduceArray(): String = {
    Reducer.reduceArray(data)(_ + _).get
  }

  /** Mutable StringBuilder append: the conventional fast approach. */
  def stringBuilder(): String = {
    val result = new StringBuilder
    for(x <- data)
      result append x
    result.toString()
  }

  // Each benchmark shares its name with a 0-ary helper above; the explicit `()` makes
  // the overload resolution unambiguous (the original `x.consume(foldLeft)` relied on
  // implicit empty-argument application and read like a method reference).
  @Benchmark
  def foldLeft(x: Blackhole): Unit = x.consume(foldLeft())

  @Benchmark
  def reduceIterable(x: Blackhole): Unit = x.consume(reduceIterable())

  @Benchmark
  def reduceArray(x: Blackhole): Unit = x.consume(reduceArray())

  @Benchmark
  def stringBuilder(x: Blackhole): Unit = x.consume(stringBuilder())
}
Example 29
Source File: SerializedMonetaryTxBenchmark.scala    From EncryCore   with GNU General Public License v3.0 5 votes vote down vote up
package benches

import java.util.concurrent.TimeUnit

import benches.SerializedMonetaryTxBenchmark.SerializedMonetaryBenchState
import benches.Utils._
import encryBenchmark.{BenchSettings, Settings}
import org.encryfoundation.common.modifiers.mempool.transaction.{Transaction, TransactionSerializer}
import org.encryfoundation.common.modifiers.state.box.AssetBox
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import org.openjdk.jmh.profile.GCProfiler
import org.openjdk.jmh.runner.{Runner, RunnerException}
import org.openjdk.jmh.runner.options.{OptionsBuilder, TimeValue, VerboseMode}

class SerializedMonetaryTxBenchmark {

  /** Parses every pre-serialized payment transaction back into a Transaction. */
  @Benchmark
  def deserializePaymentTransactionsBench(stateBench: SerializedMonetaryBenchState, bh: Blackhole): Unit = {
    val parsed = stateBench.serializedTransactions.map(bytes => TransactionSerializer.parseBytes(bytes))
    bh.consume(parsed)
  }

  /** Serializes every prepared payment transaction to its byte representation. */
  @Benchmark
  def serializePaymentTransactionsBench(stateBench: SerializedMonetaryBenchState, bh: Blackhole): Unit = {
    val serialized = stateBench.initialTransactions.map(_.bytes)
    bh.consume(serialized)
  }
}

object SerializedMonetaryTxBenchmark extends BenchSettings {

  /** Standalone JMH entry point: runs every benchmark declared in SerializedMonetaryTxBenchmark. */
  @throws[RunnerException]
  def main(args: Array[String]): Unit = {
    val runnerOptions = new OptionsBuilder()
      .include(".*" + classOf[SerializedMonetaryTxBenchmark].getSimpleName + ".*")
      .forks(1)
      .threads(1)
      .mode(Mode.AverageTime)
      .timeUnit(TimeUnit.SECONDS)
      .verbosity(VerboseMode.EXTRA)
      .addProfiler(classOf[GCProfiler])
      .warmupIterations(benchSettings.benchesSettings.warmUpIterations)
      .warmupTime(TimeValue.milliseconds(benchSettings.benchesSettings.warmUpTime))
      .measurementIterations(benchSettings.benchesSettings.measurementIterations)
      .measurementTime(TimeValue.milliseconds(benchSettings.benchesSettings.measurementTime))
      .build()
    new Runner(runnerOptions).run()
  }

  /** Shared fixture: boxes, the payment transactions built from them, and their serialized bytes. */
  @State(Scope.Benchmark)
  class SerializedMonetaryBenchState {

    // populated once in createStateForBenchmark and then only read by the benchmarks
    var initialBoxes: IndexedSeq[AssetBox] = IndexedSeq.empty[AssetBox]
    var initialTransactions: IndexedSeq[Transaction] = IndexedSeq.empty[Transaction]
    var serializedTransactions: IndexedSeq[Array[Byte]] = IndexedSeq.empty[Array[Byte]]

    /** Builds the fixture up front so setup cost is excluded from the measured time. */
    @Setup
    def createStateForBenchmark(): Unit = {
      initialBoxes = generateInitialBoxes(benchSettings.serializedMonetaryBenchSettings.totalBoxesNumber)
      initialTransactions = generatePaymentTransactions(
        initialBoxes,
        benchSettings.serializedMonetaryBenchSettings.numberOfInputs,
        benchSettings.serializedMonetaryBenchSettings.numberOfOutputs
      )
      serializedTransactions = initialTransactions.map(_.bytes)
    }
  }
}
Example 30
Source File: SnapshotAssemblerBench.scala    From EncryCore   with GNU General Public License v3.0 5 votes vote down vote up
package benches

import java.io.File
import java.util.concurrent.TimeUnit

import benches.SnapshotAssemblerBench.SnapshotAssemblerBenchState
import encry.view.state.avlTree.utils.implicits.Instances._
import benches.StateBenches.{StateBenchState, benchSettings}
import benches.Utils.{getRandomTempDir, utxoFromBoxHolder}
import encry.settings.Settings
import encry.storage.{RootNodesStorage, VersionalStorage}
import encry.storage.VersionalStorage.{StorageKey, StorageValue, StorageVersion}
import encry.storage.levelDb.versionalLevelDB.{LevelDbFactory, VLDBWrapper, VersionalLevelDBCompanion}
import encry.utils.FileHelper
import encry.view.fast.sync.SnapshotHolder
import encry.view.state.UtxoState
import encry.view.state.avlTree.AvlTree
import org.encryfoundation.common.utils.TaggedTypes.Height
import org.iq80.leveldb.{DB, Options}
import org.openjdk.jmh.annotations.{Benchmark, Mode, Scope, State}
import org.openjdk.jmh.infra.Blackhole
import org.openjdk.jmh.profile.GCProfiler
import org.openjdk.jmh.runner.{Runner, RunnerException}
import org.openjdk.jmh.runner.options.{OptionsBuilder, TimeValue, VerboseMode}
import scorex.utils.Random

class SnapshotAssemblerBench {

  
  // NOTE(review): the body of this benchmark is commented out, so `bh.consume` receives
  // Unit and the benchmark currently measures nothing beyond framework overhead.
  @Benchmark
  def createTree(stateBench: SnapshotAssemblerBenchState, bh: Blackhole): Unit = {
    bh.consume {
      //stateBench.a.initializeSnapshotData(stateBench.block1)
    }
  }
}
object SnapshotAssemblerBench {

  // Standalone JMH entry point for SnapshotAssemblerBench.
  @throws[RunnerException]
  def main(args: Array[String]): Unit = {
    val opt = new OptionsBuilder()
      .include(".*" + classOf[SnapshotAssemblerBench].getSimpleName + ".*")
      .forks(1)
      .threads(1)
      .warmupIterations(benchSettings.benchesSettings.warmUpIterations)
      .measurementIterations(benchSettings.benchesSettings.measurementIterations)
      .mode(Mode.AverageTime)
      .timeUnit(TimeUnit.SECONDS)
      .verbosity(VerboseMode.EXTRA)
      .addProfiler(classOf[GCProfiler])
      .warmupTime(TimeValue.milliseconds(benchSettings.benchesSettings.warmUpTime))
      .measurementTime(TimeValue.milliseconds(benchSettings.benchesSettings.measurementTime))
      .build
    new Runner(opt).run
  }

  // Shared fixture: a 500k-entry AVL tree plus a genesis block.
  @State(Scope.Benchmark)
  class SnapshotAssemblerBenchState extends Settings {

    val a: AvlTree[StorageKey, StorageValue] =
      createAvl("9gKDVmfsA6J4b78jDBx6JmS86Zph98NnjnUqTJBkW7zitQMReia", 0, 500000)
    val block1                              = Utils.generateGenesisBlock(Height @@ 1)


    // Builds an AVL tree backed by two fresh LevelDB stores in temp dirs, inserting one
    // asset box per nonce in [from, to].
    // NOTE(review): the LevelDB handles opened here are never closed; assumed acceptable
    // for a short-lived benchmark JVM — verify if reused elsewhere.
    def createAvl(address: String, from: Int, to: Int): AvlTree[StorageKey, StorageValue] = {
      val firstDir: File = FileHelper.getRandomTempDir
      val firstStorage: VLDBWrapper = {
        val levelDBInit = LevelDbFactory.factory.open(firstDir, new Options)
        VLDBWrapper(VersionalLevelDBCompanion(levelDBInit, settings.levelDB, keySize = 32))
      }
      val dir: File = FileHelper.getRandomTempDir
      val levelDb: DB = LevelDbFactory.factory.open(dir, new Options)
      val rootNodesStorage = RootNodesStorage[StorageKey, StorageValue](levelDb, 10, dir)

      val firstAvl: AvlTree[StorageKey, StorageValue] = AvlTree[StorageKey, StorageValue](firstStorage, rootNodesStorage)
      // each insertion happens under a fresh random version tag
      val avlNew = (from to to).foldLeft(firstAvl) { case (avl, i) =>
        val bx = Utils.genAssetBox(address, i, nonce = i)
        val b = (StorageKey !@@ bx.id, StorageValue @@ bx.bytes)
        avl.insertAndDeleteMany(StorageVersion @@ Random.randomBytes(), List(b), List.empty)
      }
      avlNew
    }

    // fresh temp dir per call
    def tmpDir: File = FileHelper.getRandomTempDir
  }

}
Example 31
Source File: HistoryBenches.scala    From EncryCore   with GNU General Public License v3.0 5 votes vote down vote up
package benches

import java.io.File
import java.util.concurrent.TimeUnit

import benches.HistoryBenches.HistoryBenchState
import benches.Utils._
import encry.view.history.History
import encryBenchmark.BenchSettings
import org.encryfoundation.common.modifiers.history.Block
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import org.openjdk.jmh.profile.GCProfiler
import org.openjdk.jmh.runner.{Runner, RunnerException}
import org.openjdk.jmh.runner.options.{OptionsBuilder, TimeValue, VerboseMode}

class HistoryBenches {

  // Measures appending the pre-generated block chain to a brand-new History.
  // The fold mutates `history` in place (append/reportModifierIsValid are side-effecting),
  // so statement order here is significant.
  @Benchmark
  def appendBlocksToHistoryBench(benchStateHistory: HistoryBenchState, bh: Blackhole): Unit = {
    bh.consume {
      val history: History = generateHistory(benchStateHistory.settings, getRandomTempDir)
      benchStateHistory.blocks.foldLeft(history) { case (historyL, block) =>
        historyL.append(block.header)
        historyL.append(block.payload)
        historyL.reportModifierIsValid(block)
      }
      history.closeStorage()
    }
  }

  // Measures re-opening (and immediately closing) a History from the state's on-disk dir.
  @Benchmark
  def readHistoryFileBench(benchStateHistory: HistoryBenchState, bh: Blackhole): Unit = {
    bh.consume {
      val history: History = generateHistory(benchStateHistory.settings, benchStateHistory.tmpDir)
      history.closeStorage()
    }
  }
}

object HistoryBenches extends BenchSettings {

  // Standalone JMH entry point for HistoryBenches.
  @throws[RunnerException]
  def main(args: Array[String]): Unit = {
    val opt = new OptionsBuilder()
      .include(".*" + classOf[HistoryBenches].getSimpleName + ".*")
      .forks(1)
      .threads(1)
      .warmupIterations(benchSettings.benchesSettings.warmUpIterations)
      .measurementIterations(benchSettings.benchesSettings.measurementIterations)
      .mode(Mode.AverageTime)
      .timeUnit(TimeUnit.SECONDS)
      .verbosity(VerboseMode.EXTRA)
      .addProfiler(classOf[GCProfiler])
      .warmupTime(TimeValue.milliseconds(benchSettings.benchesSettings.warmUpTime))
      .measurementTime(TimeValue.milliseconds(benchSettings.benchesSettings.measurementTime))
      .build
    new Runner(opt).run
  }

  // Shared fixture: builds a valid chain of blocks on disk at `tmpDir` so the
  // benchmarks can replay (append) or re-open it.
  @State(Scope.Benchmark)
  class HistoryBenchState extends encry.settings.Settings {

    val tmpDir: File = getRandomTempDir
    val initialHistory: History = generateHistory(settings, tmpDir)

    // Appends blocksNumber valid blocks one after another; the fold threads the mutated
    // History plus the previous block through each step (order-critical side effects).
    val resultedHistory: (History, Option[Block], Vector[Block]) =
      (0 until benchSettings.historyBenchSettings.blocksNumber)
        .foldLeft(initialHistory, Option.empty[Block], Vector.empty[Block]) {
          case ((prevHistory, prevBlock, vector), _) =>
            val block: Block =
              generateNextBlockValidForHistory(prevHistory, 0, prevBlock,  Seq(coinbaseTransaction(0)))
            prevHistory.append(block.header)
            prevHistory.append(block.payload)
            (prevHistory.reportModifierIsValid(block), Some(block), vector :+ block)
        }
    resultedHistory._1.closeStorage()

    // the generated chain, in application order
    val blocks: Vector[Block] = resultedHistory._3
  }
}
Example 32
Source File: OrderBookAddBenchmark.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform.dex.model.orderbook

import java.util.concurrent.TimeUnit

import com.wavesplatform.dex.domain.order.{Order, OrderType}
import com.wavesplatform.dex.model.orderbook.OrderBookAddBenchmark._
import com.wavesplatform.dex.model.state.OrderBookBenchmarkState
import com.wavesplatform.dex.model.{AcceptedOrder, OrderBook}
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import org.scalacheck.Gen

import scala.collection.JavaConverters._

@OutputTimeUnit(TimeUnit.NANOSECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Threads(4)
@Fork(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
class OrderBookAddBenchmark {
  /** Builds a book of 1250 orders starting from empty. */
  @Benchmark def add_0_plus_1250_test(st: Add_0_To_1250_State, bh: Blackhole): Unit = bh.consume(st.run())

  /** Adds 1250 orders on top of a pre-filled 1250-order book. */
  @Benchmark def add_1250_plus_1250_test(st: Add_1250_To_1250_State, bh: Blackhole): Unit = bh.consume(st.run())
}

object OrderBookAddBenchmark {

  // Two scenarios: add into an empty book, and add into an already-populated book.
  @State(Scope.Thread) class Add_0_To_1250_State    extends AddState(initOrderNumber = 0, orderNumberToAdd = 1250)
  @State(Scope.Thread) class Add_1250_To_1250_State extends AddState(initOrderNumber = 1250, orderNumberToAdd = 1250)

  // Pre-builds a book of `initOrderNumber` orders and a batch of `orderNumberToAdd`
  // orders (mixed limit/market, both sides) to insert during run().
  sealed abstract class AddState(initOrderNumber: Int, orderNumberToAdd: Int) extends OrderBookBenchmarkState {
    val maxPrice = 1000L * Order.PriceConstant
    val minPrice = 1L * Order.PriceConstant
    val priceGen = Gen.chooseNum(minPrice, maxPrice)

    val askGen = orderGen(priceGen, OrderType.SELL)
    val bidGen = orderGen(priceGen, OrderType.BUY)

    // NOTE(review): `sample.get` can in principle return None for a failing generator — assumed safe here.
    val orderBook: OrderBook = ordersGen(initOrderNumber).sample.get.foldLeft(OrderBook.empty)(_.add(_, ts, getMakerTakerFee).orderBook)

    val orders: List[AcceptedOrder] = ordersGen(orderNumberToAdd).sample.get

    // NOTE(review): run() folds over OrderBook.empty, not `orderBook` — for the
    // 1250-plus-1250 state the pre-built book is not actually the starting point; verify intent.
    def run(): OrderBook = orders.foldLeft(OrderBook.empty) {
      case (r, o) => r.add(o, ts, getMakerTakerFee).orderBook
    }

    // Generates `orderNumber` random orders: a side per order, then either a
    // limit or market order for that side.
    def ordersGen(orderNumber: Int): Gen[List[AcceptedOrder]] =
      for {
        orderSides <- Gen.listOfN(orderNumber, orderSideGen)
        orders <- Gen.sequence {
          orderSides.map { side =>
            val orderGen = if (side == OrderType.SELL) askGen else bidGen
            Gen.oneOf(limitOrderGen(orderGen), marketOrderGen(orderGen))
          }
        }
      } yield orders.asScala.toList
  }

}
Example 33
Source File: SerializedAssetTransactionBenchmark.scala    From EncryCore   with GNU General Public License v3.0 5 votes vote down vote up
package benches

import java.util.concurrent.TimeUnit

import benches.SerializedAssetTransactionBenchmark.SerializedAssetBenchState
import benches.Utils._
import encryBenchmark.{BenchSettings, Settings}
import org.encryfoundation.common.modifiers.mempool.transaction.{Transaction, TransactionSerializer}
import org.encryfoundation.common.modifiers.state.box.AssetBox
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import org.openjdk.jmh.profile.GCProfiler
import org.openjdk.jmh.runner.{Runner, RunnerException}
import org.openjdk.jmh.runner.options.{OptionsBuilder, TimeValue, VerboseMode}

class SerializedAssetTransactionBenchmark {

  /** Parses every pre-serialized asset transaction back into a Transaction. */
  @Benchmark
  def deserializeAssetTransactionsBench(stateBench: SerializedAssetBenchState, bh: Blackhole): Unit = {
    val parsed = stateBench.serializedTransactions.map(bytes => TransactionSerializer.parseBytes(bytes))
    bh.consume(parsed)
  }

  /** Serializes every prepared asset transaction to its byte representation. */
  @Benchmark
  def serializeAssetTransactionsBench(stateBench: SerializedAssetBenchState, bh: Blackhole): Unit = {
    val serialized = stateBench.initialTransactions.map(_.bytes)
    bh.consume(serialized)
  }
}

object SerializedAssetTransactionBenchmark extends BenchSettings {

  /** Standalone JMH entry point: runs every benchmark declared in SerializedAssetTransactionBenchmark. */
  @throws[RunnerException]
  def main(args: Array[String]): Unit = {
    val runnerOptions = new OptionsBuilder()
      .include(".*" + classOf[SerializedAssetTransactionBenchmark].getSimpleName + ".*")
      .forks(1)
      .threads(1)
      .mode(Mode.AverageTime)
      .timeUnit(TimeUnit.SECONDS)
      .verbosity(VerboseMode.EXTRA)
      .addProfiler(classOf[GCProfiler])
      .warmupIterations(benchSettings.benchesSettings.warmUpIterations)
      .warmupTime(TimeValue.milliseconds(benchSettings.benchesSettings.warmUpTime))
      .measurementIterations(benchSettings.benchesSettings.measurementIterations)
      .measurementTime(TimeValue.milliseconds(benchSettings.benchesSettings.measurementTime))
      .build()
    new Runner(runnerOptions).run()
  }

  /** Shared fixture: boxes, the asset transactions built from them, and their serialized bytes. */
  @State(Scope.Benchmark)
  class SerializedAssetBenchState {

    // populated once in createStateForBenchmark and then only read by the benchmarks
    var initialBoxes: IndexedSeq[AssetBox] = IndexedSeq.empty[AssetBox]
    var initialTransactions: IndexedSeq[Transaction] = IndexedSeq.empty[Transaction]
    var serializedTransactions: IndexedSeq[Array[Byte]] = IndexedSeq.empty[Array[Byte]]

    /** Builds the fixture up front so setup cost is excluded from the measured time. */
    @Setup
    def createStateForBenchmark(): Unit = {
      initialBoxes = generateInitialBoxes(benchSettings.serializedAssetBenchSettings.totalBoxesNumber)
      initialTransactions = generateAssetTransactions(
        initialBoxes,
        benchSettings.serializedAssetBenchSettings.numberOfInputs,
        benchSettings.serializedAssetBenchSettings.numberOfOutputs
      )
      serializedTransactions = initialTransactions.map(_.bytes)
    }
  }
}
Example 34
Source File: StateRollbackBench.scala    From EncryCore   with GNU General Public License v3.0 5 votes vote down vote up
package benches

import java.io.File
import java.util.concurrent.TimeUnit

import benches.StateRollbackBench.StateRollbackState
import benches.Utils._
import encry.storage.VersionalStorage
import encry.utils.CoreTaggedTypes.VersionTag
import encry.view.state.{BoxHolder, UtxoState}
import encryBenchmark.{BenchSettings, Settings}
import org.encryfoundation.common.modifiers.history.Block
import org.encryfoundation.common.modifiers.state.box.AssetBox
import org.encryfoundation.common.utils.TaggedTypes.{ADKey, Difficulty}
import org.openjdk.jmh.annotations.{Benchmark, Mode, Scope, State}
import org.openjdk.jmh.infra.Blackhole
import org.openjdk.jmh.profile.GCProfiler
import org.openjdk.jmh.runner.{Runner, RunnerException}
import org.openjdk.jmh.runner.options.{OptionsBuilder, TimeValue, VerboseMode}

class StateRollbackBench {

  /**
   * Benchmark: build a fresh IODB-backed UTXO state, apply the whole prepared
   * main chain, roll back two recorded versions, then apply the last fork block
   * and close the resulting state.
   *
   * NOTE(review): `.right.get` and `.get` throw on a failed modifier application
   * or rollback — here that aborts the benchmark run rather than being handled.
   */
  @Benchmark
  def applyBlocksToTheState(stateBench: StateRollbackState, bh: Blackhole): Unit = {
    bh.consume {
      // Fresh state in a new random temp dir on every invocation.
      val innerState: UtxoState =
        utxoFromBoxHolder(stateBench.boxesHolder, getRandomTempDir, None, stateBench.settings, VersionalStorage.IODB)
      // Apply every block in chain order, collecting the version tag after each step.
      val newState = stateBench.chain.foldLeft(innerState -> List.empty[VersionTag]) { case ((state, rootHashes), block) =>
        val newState = state.applyModifier(block).right.get
        newState -> (rootHashes :+ newState.version)
      }
      // Roll back to the second-to-last version, then apply the final fork block.
      val stateAfterRollback = newState._1.rollbackTo(newState._2.dropRight(1).last, List.empty).get
      val stateAfterForkBlockApplying = stateAfterRollback.applyModifier(stateBench.forkBlocks.last).right.get
      stateAfterForkBlockApplying.close()
    }
  }
}

object StateRollbackBench extends BenchSettings {

  /** JMH entry point: runs only StateRollbackBench with settings-driven timings. */
  @throws[RunnerException]
  def main(args: Array[String]): Unit = {
    val opt = new OptionsBuilder()
      .include(".*" + classOf[StateRollbackBench].getSimpleName + ".*")
      .forks(1)
      .threads(1)
      .warmupIterations(benchSettings.benchesSettings.warmUpIterations)
      .measurementIterations(benchSettings.benchesSettings.measurementIterations)
      .mode(Mode.AverageTime)
      .timeUnit(TimeUnit.SECONDS)
      .verbosity(VerboseMode.EXTRA)
      .addProfiler(classOf[GCProfiler])
      .warmupTime(TimeValue.milliseconds(benchSettings.benchesSettings.warmUpTime))
      .measurementTime(TimeValue.milliseconds(benchSettings.benchesSettings.measurementTime))
      .build
    new Runner(opt).run
  }

  /**
   * Pre-builds, once per benchmark run, a main chain plus a matching fork block
   * per height (differing only by extra difficulty) on top of a genesis block.
   * The LevelDB-backed state used during generation is closed at the end; the
   * benchmark method rebuilds its own state from `boxesHolder`.
   */
  @State(Scope.Benchmark)
  class StateRollbackState extends encry.settings.Settings {

    val tmpDir: File = getRandomTempDir

    // One hardcoded box per nonce, all paying to the same key.
    val initialBoxes: IndexedSeq[AssetBox] = (0 until benchSettings.stateBenchSettings.totalBoxesNumber).map(nonce =>
      genHardcodedBox(privKey.publicImage.address.address, nonce)
    )
    val boxesHolder: BoxHolder = BoxHolder(initialBoxes)
    var state: UtxoState = utxoFromBoxHolder(boxesHolder, tmpDir, None, settings, VersionalStorage.LevelDB)
    val genesisBlock: Block = generateGenesisBlockValidForState(state)

    // NOTE(review): `.right.get` throws if genesis application fails — accepted for setup code.
    state = state.applyModifier(genesisBlock).right.get

    // Fold over the configured number of heights; each step produces a main-chain
    // block and a fork block that both spend all boxes created by the previous
    // block, but only the main-chain block is applied to the running state.
    val stateGenerationResults: (List[(Block, Block)], Block, UtxoState, IndexedSeq[AssetBox]) =
      (0 until benchSettings.stateBenchSettings.blocksNumber).foldLeft(List.empty[(Block, Block)], genesisBlock, state, initialBoxes) {
        case ((blocks, block, stateL, boxes), _) =>
          val nextBlockMainChain: Block = generateNextBlockForStateWithSpendingAllPreviousBoxes(
            block,
            stateL,
            block.payload.txs.flatMap(_.newBoxes.map(_.asInstanceOf[AssetBox])).toIndexedSeq)
          // The fork sibling is identical apart from the added difficulty.
          val nextBlockFork: Block = generateNextBlockForStateWithSpendingAllPreviousBoxes(
            block,
            stateL,
            block.payload.txs.flatMap(_.newBoxes.map(_.asInstanceOf[AssetBox])).toIndexedSeq,
            addDiff = Difficulty @@ BigInt(100)
          )
          val stateN: UtxoState = stateL.applyModifier(nextBlockMainChain).right.get
          (blocks :+ (nextBlockMainChain, nextBlockFork),
            nextBlockMainChain,
            stateN,
            boxes.drop(
              benchSettings.stateBenchSettings.transactionsNumberInEachBlock *
                benchSettings.stateBenchSettings.numberOfInputsInOneTransaction)
          )
      }
    // Both chains share the genesis block as their first element.
    val chain: List[Block] = genesisBlock +: stateGenerationResults._1.map(_._1)
    val forkBlocks: List[Block] = genesisBlock +: stateGenerationResults._1.map(_._2)
    state = stateGenerationResults._3
    state.close()
  }
}
Example 35
Source File: VersionalLevelDBBanches.scala    From EncryCore   with GNU General Public License v3.0 5 votes vote down vote up
package benches

import java.util.concurrent.TimeUnit
import benches.VersionalLevelDBBanches.VersionalLevelDBState
import encry.settings.LevelDBSettings
import encry.storage.levelDb.versionalLevelDB.{LevelDbFactory, VersionalLevelDBCompanion}
import encry.utils.FileHelper
import org.iq80.leveldb.Options
import org.openjdk.jmh.annotations.{Benchmark, Mode, Scope, State}
import org.openjdk.jmh.infra.Blackhole
import org.openjdk.jmh.profile.GCProfiler
import org.openjdk.jmh.runner.options.{OptionsBuilder, TimeValue, VerboseMode}
import org.openjdk.jmh.runner.{Runner, RunnerException}

/** Benchmarks insertion throughput of the versional LevelDB wrapper. */
class VersionalLevelDBBanches {

  /**
   * Opens a fresh LevelDB in a random temp directory, wraps it in a versional
   * store capped at 100 versions, inserts all 10k prepared elements and closes
   * the store; the whole cycle is fed to the Blackhole.
   */
  @Benchmark
  def versionalLevelDbInsertion(benchStateHistory: VersionalLevelDBState, bh: Blackhole): Unit =
    bh.consume {
      val workingDir  = FileHelper.getRandomTempDir
      val rawDb       = LevelDbFactory.factory.open(workingDir, new Options)
      val versionalDb = VersionalLevelDBCompanion(rawDb, LevelDBSettings(100))
      benchStateHistory.elems10k.foreach(versionalDb.insert)
      versionalDb.close()
    }
}

/** JMH launcher and shared state for the versional LevelDB insertion bench. */
object VersionalLevelDBBanches {

  /** Entry point: one fork, two threads, fixed warmup/measurement times. */
  @throws[RunnerException]
  def main(args: Array[String]): Unit = {
    val jmhOptions = new OptionsBuilder()
      .include(".*" + classOf[VersionalLevelDBBanches].getSimpleName + ".*")
      .forks(1)
      .threads(2)
      .warmupIterations(1)
      .measurementIterations(1)
      .mode(Mode.AverageTime)
      .timeUnit(TimeUnit.SECONDS)
      .verbosity(VerboseMode.EXTRA)
      .addProfiler(classOf[GCProfiler])
      .warmupTime(TimeValue.milliseconds(500))
      .measurementTime(TimeValue.minutes(5))
      .build
    new Runner(jmhOptions).run
  }

  /** Pre-generated insertion payload shared by all benchmark invocations. */
  @State(Scope.Benchmark)
  class VersionalLevelDBState {

    // Alternative payload sizes, kept here for reference:
    //val elems1k = Utils.generateRandomLevelDbElemsWithoutDeletions(1000, 100)
    //val elems5k = Utils.generateRandomLevelDbElemsWithoutDeletions(5000, 100)
    val elems10k = Utils.generateRandomLevelDbElemsWithoutDeletions(10000, 100)
    //val elems30k = Utils.generateRandomLevelDbElemsWithoutDeletions(30000, 100)
  }
}
Example 36
Source File: StandaloneAhcWSRequestBenchMapsBench.scala    From play-ws   with Apache License 2.0 5 votes vote down vote up
package play.api.libs.ws.ahc

import java.util.concurrent.TimeUnit

import akka.stream.Materializer
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import play.api.libs.ws.StandaloneWSRequest


/**
 * Measures the cost of appending one more query parameter / HTTP header to a
 * request that already carries `size` of each.
 */
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Fork(jvmArgsAppend = Array("-Xmx350m", "-XX:+HeapDumpOnOutOfMemoryError"), value = 1)
@State(Scope.Benchmark)
class StandaloneAhcWSRequestBenchMapsBench {

  // Requests are only built, never executed, so no real materializer is needed.
  private implicit val materializer: Materializer = null
  private var exampleRequest: StandaloneWSRequest = _

  @Param(Array("1", "10", "100", "1000", "10000"))
  private var size: Int = _

  /** Pre-builds a request carrying `size` query parameters and `size` headers. */
  @Setup def setup(): Unit = {
    // Pairs "1" -> "1" .. "size" -> "size", used for both params and headers.
    val params = (1 to size).map { i =>
      val key = i.toString
      key -> key
    }
    exampleRequest = StandaloneAhcWSRequest(new StandaloneAhcWSClient(null), "https://www.example.com")
      .addQueryStringParameters(params: _*)
      .addHttpHeaders(params: _*)
  }

  /** Adds a single extra query parameter to the pre-built request. */
  @Benchmark
  def addQueryParams(bh: Blackhole): Unit =
    bh.consume(exampleRequest.addQueryStringParameters("nthParam" -> "nthParam"))

  /** Adds a single extra header to the pre-built request. */
  @Benchmark
  def addHeaders(bh: Blackhole): Unit =
    bh.consume(exampleRequest.addHttpHeaders("nthHeader" -> "nthHeader"))
}
Example 37
Source File: ListFillBenchmarks.scala    From collection-strawman   with Apache License 2.0 5 votes vote down vote up
package strawman.collection.immutable

import java.util.concurrent.TimeUnit

import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import strawman.collection.immutable

/** Times strawman `immutable.List.fill` across a wide range of sizes. */
@BenchmarkMode(scala.Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@Fork(1)
@Warmup(iterations = 8)
@Measurement(iterations = 8)
@State(Scope.Benchmark)
class ListFillBenchmarks {

  @Param(scala.Array("0", "1", "2", "3", "4", "7", "8", "15", "16", "17", "39", "282", "4096", "131070", "7312102"))
  var size: Int = _

  /** Builds a list of `size` unit values and hands it to the Blackhole. */
  @Benchmark
  def fill(bh: Blackhole): Unit = {
    val result = immutable.List.fill(size)(())
    bh.consume(result)
  }
}

/** Baseline: standard-library `scala.List.fill` over the same size range. */
@BenchmarkMode(scala.Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@Fork(1)
@Warmup(iterations = 8)
@Measurement(iterations = 8)
@State(Scope.Benchmark)
class ScalaListFillBenchmarks {

  @Param(scala.Array("0", "1", "2", "3", "4", "7", "8", "15", "16", "17", "39", "282", "4096", "131070", "7312102"))
  var size: Int = _

  /** Builds a scala.List of `size` unit values and hands it to the Blackhole. */
  @Benchmark
  def fill(bh: Blackhole): Unit = {
    val result = scala.List.fill(size)(())
    bh.consume(result)
  }
}
Example 38
Source File: PingPongBenchmark.scala    From zio   with Apache License 2.0 5 votes vote down vote up
package zio.internal

import java.util.concurrent.TimeUnit

import BenchUtils._
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.{ Blackhole, Control }

/**
 * Two-thread ping-pong over a pair of queues: `roundtrip` pushes batches into
 * `qIn` and reads replies from `qOut`; `poll` (running in the same JMH group)
 * moves elements from `qIn` to `qOut`. JMH's `Control` is used to break out of
 * the busy-poll loops when the measurement is stopped.
 */
@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@Warmup(iterations = 10, time = 1)
@Measurement(iterations = 10, time = 1)
@Fork(5)
@State(Scope.Group)
private[this] class PingPongBenchmark {
  // Fresh element per offer; `emptyEl` is the sentinel `poll` returns when the queue is empty.
  def mkEl(): AnyRef = new Object()
  val emptyEl        = null.asInstanceOf[AnyRef]

  @Param(Array("1", "2"))
  var batchSize: Int = _

  @Param(Array("4"))
  var qCapacity: Int = _

  @Param(Array("RingBufferPow2", "JCTools", "LinkedQueue", "JucBlocking"))
  var qType: String = _

  var qIn: MutableConcurrentQueue[AnyRef]  = _
  var qOut: MutableConcurrentQueue[AnyRef] = _

  /** Creates both queues of the selected implementation once per trial. */
  @Setup(Level.Trial)
  def setup(): Unit = {
    qIn = queueByType(qType, qCapacity)
    qOut = queueByType(qType, qCapacity)
  }

  /**
   * Producer side: offers `batchSize` elements into `qIn`, then busy-polls
   * `qOut` for the same number of replies. The inner spin stops early if
   * the measurement ends so the group can shut down cleanly.
   */
  @Benchmark
  @Group("A")
  @GroupThreads(1)
  def roundtrip(control: Control, blackhole: Blackhole): Unit = {
    var i = 0
    while (i < batchSize) { qIn.offer(mkEl()); i += 1 }

    i = 0
    while (i < batchSize && !control.stopMeasurement) {
      var anEl = qOut.poll(emptyEl)
      while (!control.stopMeasurement && (anEl == emptyEl)) anEl = qOut.poll(emptyEl)
      blackhole.consume(anEl)
      i += 1
    }
  }

  /**
   * Relay side: busy-polls `qIn` and forwards each element to `qOut`.
   * If measurement stopped while spinning, a fresh element is offered instead
   * so the producer's reply count still adds up.
   */
  @Benchmark
  @Group("A")
  @GroupThreads(1)
  def poll(control: Control): Unit = {
    var i: Int = 0

    while (i < batchSize && !control.stopMeasurement) {
      var anEl = qIn.poll(emptyEl)
      while (!control.stopMeasurement && (anEl == emptyEl)) anEl = qIn.poll(emptyEl)
      if (anEl != emptyEl) qOut.offer(anEl) else qOut.offer(mkEl())
      i += 1
    }
  }
}
Example 39
Source File: JSONToMsgPackBenchmark.scala    From airframe   with Apache License 2.0 5 votes vote down vote up
package wvlet.airframe.benchmark.json_stream

import java.util.concurrent.TimeUnit

import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import wvlet.airframe.benchmark.json.JSONBenchmark
import wvlet.airframe.codec.JSONValueCodec
import wvlet.airframe.json.{JSON, JSONSource}
import wvlet.airframe.msgpack.json.{NestedMessagePackBuilder, StreamMessagePackBuilder}
import wvlet.airframe.msgpack.spi.MessagePack


/**
 * Base class for JSON-to-MessagePack conversion benchmarks; concrete
 * subclasses supply the JSON payload via `json`.
 */
abstract class JSONToMsgPackBenchmarkBase {
  protected val json: String

  /** Parses to a JSON value first, then encodes it via JSONValueCodec. */
  @Benchmark
  def jsonValue(blackhole: Blackhole): Unit = {
    val parsed = JSON.parse(json)
    blackhole.consume(JSONValueCodec.toMsgPack(parsed))
  }

  /** Conversion via NestedMessagePackBuilder straight from the JSON source. */
  @Benchmark
  def nested(blackHole: Blackhole): Unit = {
    val source = JSONSource.fromString(json)
    blackHole.consume(NestedMessagePackBuilder.fromJSON(source))
  }

  /** Two-pass conversion via StreamMessagePackBuilder. */
  @Benchmark
  def twoPass(blackHole: Blackhole): Unit = {
    val source = JSONSource.fromString(json)
    blackHole.consume(StreamMessagePackBuilder.fromJSON(source))
  }
}
/** Runs the JSON-to-MessagePack benchmarks over the Twitter sample payload. */
@State(Scope.Benchmark)
@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.MILLISECONDS)
class TwitterJSON extends JSONToMsgPackBenchmarkBase {
  override protected val json: String = JSONBenchmark.twitterJson
}

/** Runs the JSON-to-MessagePack benchmarks over the single-entry Twitter payload. */
@State(Scope.Benchmark)
@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.MILLISECONDS)
class TwitterSingleJSON extends JSONToMsgPackBenchmarkBase {
  override protected val json: String = JSONBenchmark.twitterSingleJson
}

/**
 * Runs the JSON-to-MessagePack benchmarks over an integer-array payload.
 * NOTE(review): the class name looks like a typo for `IntArrayJSON`; renaming
 * would change JMH include patterns and reported names, so it is left as-is.
 */
@State(Scope.Benchmark)
@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.MILLISECONDS)
class IntArraySON extends JSONToMsgPackBenchmarkBase {
  override protected val json: String = JSONBenchmark.jsonIntArray
}
Example 40
Source File: JavaSerializationBenchmark.scala    From scala-commons   with MIT License 5 votes vote down vote up
package com.avsystem.commons
package rpc.akka.serialization

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

import org.openjdk.jmh.annotations.{Benchmark, BenchmarkMode, Fork, Measurement, Mode, Scope, State, Warmup}
import org.openjdk.jmh.infra.Blackhole


/**
 * Baseline benchmark for plain JDK object serialization of a small
 * case-class graph (`Something`): write, read, and full round trip.
 */
@Warmup(iterations = 5)
@Measurement(iterations = 20)
@Fork(1)
@BenchmarkMode(Array(Mode.Throughput))
@State(Scope.Thread)
class JavaSerializationBenchmark {

  val something = Something(42, Nested(4 :: 8 :: 15 :: 16 :: 23 :: 42 :: Nil, 0), "lol")
  // Pre-serialized payload used by the read-only benchmark.
  val array = serialize(something)

  // Writes a value through ObjectOutputStream and returns the resulting bytes.
  private def serialize(value: AnyRef): Array[Byte] = {
    val buffer = new ByteArrayOutputStream()
    val out    = new ObjectOutputStream(buffer)
    out.writeObject(value)
    out.close()
    buffer.toByteArray
  }

  // Reads a Something back from previously serialized bytes.
  private def deserialize(bytes: Array[Byte]): Something =
    new ObjectInputStream(new ByteArrayInputStream(bytes)).readObject().asInstanceOf[Something]

  /** Full write-then-read round trip. */
  @Benchmark
  def byteStringOutput(): Something = deserialize(serialize(something))

  /** Serialization only. */
  @Benchmark
  def writeTest(): Array[Byte] = serialize(something)

  /** Deserialization only, from the pre-serialized payload. */
  @Benchmark
  def readTest(): Something = deserialize(array)
}
Example 41
Source File: StreamInputOutputBenchmark.scala    From scala-commons   with MIT License 5 votes vote down vote up
package com.avsystem.commons
package ser

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}

import com.avsystem.commons.serialization.{GenCodec, StreamInput, StreamOutput}
import org.openjdk.jmh.annotations.{Benchmark, BenchmarkMode, Fork, Measurement, Mode, Scope, State, Warmup}
import org.openjdk.jmh.infra.Blackhole


// Sample payload types exercised by the stream-codec benchmarks below.
case class Toplevel(int: Int, nested: Nested, str: String)
case class Nested(list: List[Int], int: Int)

object Toplevel {
  // Implicit GenCodec instances for both payload types, materialized once
  // here so they are found via the companion's implicit scope.
  implicit val nestedCodec: GenCodec[Nested] = GenCodec.materialize[Nested]
  implicit val codec: GenCodec[Toplevel] = GenCodec.materialize[Toplevel]
}

/**
 * Benchmarks GenCodec stream encoding/decoding of a `Toplevel` value, both
 * through the derived codecs and through hand-written raw stream calls that
 * mirror the exact field/element order of the derived form.
 */
@Warmup(iterations = 10)
@Measurement(iterations = 20)
@Fork(1)
@BenchmarkMode(Array(Mode.Throughput))
@State(Scope.Thread)
class StreamInputOutputBenchmark {

  val something = Toplevel(35, Nested(List(121, 122, 123, 124, 125, 126), 53), "lol")

  // Pre-encoded bytes of `something`, used as input for the decode benchmarks.
  val inputArray: Array[Byte] = {
    val os = new ByteArrayOutputStream()

    GenCodec.write(new StreamOutput(new DataOutputStream(os)), something)
    os.toByteArray
  }

  /** Codec-based encode of `something` to a byte array. */
  @Benchmark
  def testEncode(bh: Blackhole): Unit = {
    val os = new ByteArrayOutputStream(inputArray.length)
    val output = new StreamOutput(new DataOutputStream(os))
    GenCodec.write(output, something)
    bh.consume(os.toByteArray)
  }

  /** Codec-based decode of the pre-encoded bytes back into a Toplevel. */
  @Benchmark
  def testDecode(bh: Blackhole): Unit = {
    val is = new DataInputStream(new ByteArrayInputStream(inputArray))
    val input = new StreamInput(is)
    bh.consume(GenCodec.read[Toplevel](input))
  }

  /**
   * Hand-rolled encode: writes the same object/field/list structure the
   * derived codec produces, field by field, without going through GenCodec.
   */
  @Benchmark
  def testEncodeRaw(bh: Blackhole): Unit = {
    val os = new ByteArrayOutputStream(inputArray.length)
    val output = new StreamOutput(new DataOutputStream(os))
    val toplevelOutput = output.writeObject()
    toplevelOutput.writeField("int").writeSimple().writeInt(35)
    val nestedOutput = toplevelOutput.writeField("nested").writeObject()
    val listOutput = nestedOutput.writeField("list").writeList()
    listOutput.writeElement().writeSimple().writeInt(121)
    listOutput.writeElement().writeSimple().writeInt(122)
    listOutput.writeElement().writeSimple().writeInt(123)
    listOutput.writeElement().writeSimple().writeInt(124)
    listOutput.writeElement().writeSimple().writeInt(125)
    listOutput.writeElement().writeSimple().writeInt(126)
    listOutput.finish()
    nestedOutput.writeField("int").writeSimple().writeInt(53)
    nestedOutput.finish()
    toplevelOutput.writeField("str").writeSimple().writeString("lol")
    toplevelOutput.finish()
    bh.consume(os.toByteArray)
  }

  /**
   * Hand-rolled decode mirroring testEncodeRaw's structure.
   * NOTE(review): the bare `hasNext` calls discard their result; they may be
   * needed to advance/finish the streaming readers — confirm against the
   * StreamInput API before removing them.
   */
  @Benchmark
  def testDecodeRaw(bh: Blackhole): Unit = {
    val is = new DataInputStream(new ByteArrayInputStream(inputArray))
    val input = new StreamInput(is)
    val objInput = input.readObject()
    val intField = objInput.nextField().readSimple().readInt()
    val nestedInput = objInput.nextField().readObject()
    val listInput = nestedInput.nextField().readList()
    val listNested = List(
      listInput.nextElement().readSimple().readInt(),
      listInput.nextElement().readSimple().readInt(),
      listInput.nextElement().readSimple().readInt(),
      listInput.nextElement().readSimple().readInt(),
      listInput.nextElement().readSimple().readInt(),
      listInput.nextElement().readSimple().readInt()
    )
    listInput.hasNext
    val intNested = nestedInput.nextField().readSimple().readInt()
    nestedInput.hasNext
    val strField = objInput.nextField().readSimple().readString()
    objInput.hasNext
    bh.consume(Toplevel(intField, Nested(listNested, intNested), strField))
  }
}