org.junit.Test Scala Examples

The following examples show how to use org.junit.Test in Scala. Each example is taken from an open-source project; its source file, project name, and license are noted above the code.
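Before the project-specific examples below, here is a minimal, self-contained sketch of the pattern they all share (class and method names are illustrative, not taken from any of the projects): import org.junit.Test, annotate a zero-argument method returning Unit, and assert inside it.

import org.junit.Test
import org.junit.Assert.assertEquals

// A minimal JUnit 4 test in Scala: the runner executes every method annotated with @Test.
class CalculatorSpec {
  @Test
  def addsTwoNumbers(): Unit = {
    val sum = 1 + 2
    assertEquals(3, sum) // fails the test if expected and actual differ
  }
}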
Example 1
Source File: NaptimeModuleTest.scala    From naptime    with Apache License 2.0
package org.coursera.naptime

import java.util.Date
import javax.inject.Inject

import akka.stream.Materializer
import com.google.inject.Guice
import com.google.inject.Stage
import com.linkedin.data.schema.DataSchema
import com.linkedin.data.schema.DataSchemaUtil
import com.linkedin.data.schema.PrimitiveDataSchema
import com.linkedin.data.schema.RecordDataSchema
import org.coursera.naptime.model.KeyFormat
import org.coursera.naptime.resources.TopLevelCollectionResource
import org.coursera.naptime.router2.NaptimeRoutes
import org.junit.Test
import org.mockito.Mockito.mock
import org.scalatest.junit.AssertionsForJUnit
import play.api.libs.json.Json
import play.api.libs.json.OFormat

import scala.concurrent.ExecutionContext

object NaptimeModuleTest {
  case class User(name: String, createdAt: Date)
  object User {
    implicit val oFormat: OFormat[User] = Json.format[User]
  }
  class MyResource(implicit val executionContext: ExecutionContext, val materializer: Materializer)
      extends TopLevelCollectionResource[String, User] {
    override implicit def resourceFormat: OFormat[User] = User.oFormat
    override def keyFormat: KeyFormat[KeyType] = KeyFormat.stringKeyFormat
    override def resourceName: String = "myResource"
    implicit val fields = Fields

    def get(id: String) = Nap.get(ctx => ???)
  }
  object MyFakeModule extends NaptimeModule {
    override def configure(): Unit = {
      bindResource[MyResource]
      bind[MyResource].toInstance(mock(classOf[MyResource]))
      bindSchemaType[Date](DataSchemaUtil.dataSchemaTypeToPrimitiveDataSchema(DataSchema.Type.LONG))
    }
  }

  class OverrideTypesHelper @Inject()(val schemaOverrideTypes: NaptimeModule.SchemaTypeOverrides)
}

class NaptimeModuleTest extends AssertionsForJUnit {
  import NaptimeModuleTest._

  
  @Test
  def checkInferredOverrides(): Unit = {
    val injector = Guice.createInjector(Stage.DEVELOPMENT, MyFakeModule, NaptimeModule)
    val overrides = injector.getInstance(classOf[OverrideTypesHelper])
    assert(overrides.schemaOverrideTypes.size === 1)
    assert(overrides.schemaOverrideTypes.contains("java.util.Date"))
  }

  @Test
  def checkComputedOverrides(): Unit = {
    val injector = Guice.createInjector(Stage.DEVELOPMENT, MyFakeModule, NaptimeModule)
    val overrides = injector.getInstance(classOf[OverrideTypesHelper])
    val routes = injector.getInstance(classOf[NaptimeRoutes])
    assert(1 === routes.routerBuilders.size)
    val routerBuilder = routes.routerBuilders.head
    val inferredSchemaKeyed =
      routerBuilder.types.find(_.key == "org.coursera.naptime.NaptimeModuleTest.User").get
    assert(inferredSchemaKeyed.value.isInstanceOf[RecordDataSchema])
    val userSchema = inferredSchemaKeyed.value.asInstanceOf[RecordDataSchema]
    assert(2 === userSchema.getFields.size())
    val initialCreatedAtSchema = userSchema.getField("createdAt").getType.getDereferencedDataSchema
    assert(initialCreatedAtSchema.isInstanceOf[RecordDataSchema])
    assert(
      initialCreatedAtSchema
        .asInstanceOf[RecordDataSchema]
        .getDoc
        .contains("Unable to infer schema"))
    SchemaUtils.fixupInferredSchemas(userSchema, overrides.schemaOverrideTypes)
    val fixedCreatedAtSchema = userSchema.getField("createdAt").getType.getDereferencedDataSchema
    assert(fixedCreatedAtSchema.isInstanceOf[PrimitiveDataSchema])
  }
} 
Example 2
Source File: TestOracleDataTypeConverter.scala    From ohara    with Apache License 2.0
package oharastream.ohara.connector.jdbc.datatype

import java.sql.ResultSet

import oharastream.ohara.client.configurator.InspectApi.RdbColumn
import oharastream.ohara.common.rule.OharaTest
import org.junit.Test
import org.mockito.Mockito
import org.mockito.Mockito.when
import org.scalatest.matchers.should.Matchers._

class TestOracleDataTypeConverter extends OharaTest {
  @Test
  def testConverterCharValue(): Unit = {
    val resultSet: ResultSet = Mockito.mock(classOf[ResultSet])
    when(resultSet.getString("column1")).thenReturn("value1")
    val column                  = RdbColumn("column1", "CHAR", false)
    val oracleDataTypeConverter = new OracleDataTypeConverter()
    val result                  = oracleDataTypeConverter.converterValue(resultSet, column)
    result shouldBe "value1"
    result.isInstanceOf[String] shouldBe true
  }

  @Test
  def testConverterRawValue(): Unit = {
    val resultSet: ResultSet = Mockito.mock(classOf[ResultSet])
    when(resultSet.getBytes("column1")).thenReturn("aaaa".getBytes)
    val column                  = RdbColumn("column1", "RAW", false)
    val oracleDataTypeConverter = new OracleDataTypeConverter()
    val result                  = oracleDataTypeConverter.converterValue(resultSet, column)
    result.isInstanceOf[Array[Byte]] shouldBe true
    new String(result.asInstanceOf[Array[Byte]]) shouldBe "aaaa"
  }

  @Test
  def testConverterRawNullValue(): Unit = {
    val resultSet: ResultSet = Mockito.mock(classOf[ResultSet])
    when(resultSet.getBytes("column1")).thenReturn(null)
    val column                  = RdbColumn("column1", "RAW", false)
    val oracleDataTypeConverter = new OracleDataTypeConverter()
    val result                  = oracleDataTypeConverter.converterValue(resultSet, column)
    result.isInstanceOf[Array[Byte]] shouldBe true
    result.asInstanceOf[Array[Byte]].length shouldBe 0
  }

  @Test
  def testConverterSmallIntValue(): Unit = {
    val resultSet: ResultSet = Mockito.mock(classOf[ResultSet])
    when(resultSet.getInt("column1")).thenReturn(111)
    val column                  = RdbColumn("column1", "INT", false)
    val oracleDataTypeConverter = new OracleDataTypeConverter()
    val result                  = oracleDataTypeConverter.converterValue(resultSet, column)
    result.isInstanceOf[Integer] shouldBe true
    result.asInstanceOf[Integer] shouldBe 111
  }
} 
Example 3
Source File: TsStreamingTest.scala    From spark-riak-connector    with Apache License 2.0
package com.basho.riak.spark.streaming

import java.nio.ByteBuffer
import java.util.concurrent.{Callable, Executors, TimeUnit}

import com.basho.riak.spark._
import com.basho.riak.spark.rdd.RiakTSTests
import com.basho.riak.spark.rdd.timeseries.{AbstractTimeSeriesTest, TimeSeriesData}
import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper, SerializationFeature}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.apache.spark.sql.Row
import org.junit.Assert._
import org.junit.experimental.categories.Category
import org.junit.{After, Before, Test}

@Category(Array(classOf[RiakTSTests]))
class TsStreamingTest extends AbstractTimeSeriesTest(false) with SparkStreamingFixture {

  protected final val executorService = Executors.newCachedThreadPool()
  private val dataSource = new SocketStreamingDataSource
  private var port = -1

  @Before
  def setUp(): Unit = {
    port = dataSource.start(client => {
      testData
        .map(tolerantMapper.writeValueAsString)
        .foreach(x => client.write(ByteBuffer.wrap(s"$x\n".getBytes)))
      logInfo(s"${testData.length} values were send to client")
    })
  }

  @After
  def tearDown(): Unit = {
    dataSource.stop()
  }

  @Test(timeout = 10 * 1000) // 10 seconds timeout
  def saveToRiak(): Unit = {
    executorService.submit(new Runnable {
      override def run(): Unit = {
        ssc.socketTextStream("localhost", port)
          .map(string => {
            val tsdata = new ObjectMapper()
              .configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, true)
              .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, true)
              .configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true)
              .configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
              .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false)
              .registerModule(DefaultScalaModule)
              .readValue(string, classOf[TimeSeriesData])
            Row(1, "f", tsdata.time, tsdata.user_id, tsdata.temperature_k)
          })
          .saveToRiakTS(bucketName)

        ssc.start()
        ssc.awaitTerminationOrTimeout(5 * 1000)
      }
    })

    val result = executorService.submit(new Callable[Array[Seq[Any]]] {
      override def call(): Array[Seq[Any]] = {
        var rdd = sc.riakTSTable[Row](bucketName)
          .sql(s"SELECT user_id, temperature_k FROM $bucketName $sqlWhereClause")
        var count = rdd.count()
        while (count < testData.length) {
          TimeUnit.SECONDS.sleep(2)

          rdd = sc.riakTSTable[Row](bucketName)
            .sql(s"SELECT user_id, temperature_k FROM $bucketName $sqlWhereClause")
          count = rdd.count()
        }
        rdd.collect().map(_.toSeq)
      }
    }).get()

    assertEquals(testData.length, result.length)
    assertEqualsUsingJSONIgnoreOrder(
      """
        |[
        |   ['bryce',305.37],
        |   ['bryce',300.12],
        |   ['bryce',295.95],
        |   ['ratman',362.121],
        |   ['ratman',3502.212]
        |]
      """.stripMargin, result)
  }
} 
Example 4
Source File: ChronoLocalDateTest.scala    From scala-js-java-time    with BSD 3-Clause "New" or "Revised" License
package org.scalajs.testsuite.javalib.time.chrono

import java.time.{DateTimeException, LocalTime, LocalDate}
import java.time.chrono.ChronoLocalDate

import org.junit.Test
import org.junit.Assert._
import org.scalajs.testsuite.utils.AssertThrows._

class ChronoLocalDateTest {
  import ChronoLocalDate._

  @Test def test_timeLineOrder(): Unit = {
    val ord = timeLineOrder
    val ds = Seq(LocalDate.MIN, LocalDate.of(2011, 2, 28), LocalDate.MAX)

    for {
      d1 <- ds
      d2 <- ds
    } {
      assertEquals(math.signum(d1.compareTo(d2)),
          math.signum(ord.compare(d1, d2)))
    }
  }

  @Test def test_from(): Unit = {
    for (d <- Seq(LocalDate.MIN, LocalDate.of(2011, 2, 28), LocalDate.MAX))
      assertEquals(d, from(d))

    for (t <- Seq(LocalTime.MIN, LocalTime.NOON, LocalTime.MAX))
      expectThrows(classOf[DateTimeException], from(t))
  }
} 
Example 5
Source File: ChronoPeriodTest.scala    From scala-js-java-time    with BSD 3-Clause "New" or "Revised" License
package org.scalajs.testsuite.javalib.time.chrono

import java.time.LocalDate
import java.time.chrono.ChronoPeriod

import org.junit.Test
import org.junit.Assert.assertEquals

class ChronoPeriodTest {
  @Test def test_between(): Unit = {
    val ds = Seq(LocalDate.MIN, LocalDate.of(2011, 2, 28), LocalDate.MAX)
    for {
      d1 <- ds
      d2 <- ds
    } {
      assertEquals(d1.until(d2), ChronoPeriod.between(d1, d2))
    }
  }
} 
Example 6
Source File: TestShabondiDefinitions.scala    From ohara    with Apache License 2.0
package oharastream.ohara.shabondi

import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.ClassType
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

class TestShabondiDefinitions extends OharaTest {
  @Test
  def testSourceKind(): Unit = {
    ShabondiDefinitions.sourceDefinitions
      .find(_.key() == "kind")
      .get
      .defaultString() shouldBe ClassType.SOURCE.key()
  }

  @Test
  def testSinkKind(): Unit = {
    ShabondiDefinitions.sinkDefinitions
      .find(_.key() == "kind")
      .get
      .defaultString() shouldBe ClassType.SINK.key()
  }
} 
Example 7
Source File: TestSinkConfig.scala    From ohara    with Apache License 2.0
package oharastream.ohara.shabondi.sink

import java.time.{Duration => JDuration}

import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.TopicKey
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

import scala.jdk.CollectionConverters._

final class TestSinkConfig extends OharaTest {
  private def topicKey1 = TopicKey.of("default", "topic1")
  private def topicKey2 = TopicKey.of("default", "topic2")

  @Test
  def test(): Unit = {
    import oharastream.ohara.shabondi.ShabondiDefinitions._
    val jsonSinkTopicKeys = TopicKey.toJsonString(Seq(topicKey1, topicKey2).asJava)
    val args = Map(
      CLIENT_PORT_DEFINITION.key       -> "8080",
      SINK_FROM_TOPICS_DEFINITION.key  -> jsonSinkTopicKeys,
      SINK_POLL_TIMEOUT_DEFINITION.key -> "1500 milliseconds",
      SINK_GROUP_IDLETIME.key          -> "180 seconds"
    )
    val config = new SinkConfig(args)
    config.port should ===(8080)

    val topicKeys = Seq(TopicKey.of("default", "topic1"), TopicKey.of("default", "topic2"))

    config.sinkFromTopics.size should ===(2)
    config.sinkFromTopics should contain(topicKeys(0))
    config.sinkFromTopics should contain(topicKeys(1))
    config.sinkPollTimeout should ===(JDuration.ofMillis(1500))
    config.sinkGroupIdleTime should ===(JDuration.ofSeconds(180))
  }
} 
Example 8
Source File: TestSourceConfig.scala    From ohara    with Apache License 2.0
package oharastream.ohara.shabondi.source

import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.TopicKey
import org.junit.Test

import scala.jdk.CollectionConverters._
import org.scalatest.matchers.should.Matchers._

final class TestSourceConfig extends OharaTest {
  private def topicKey1 = TopicKey.of("default", "topic1")
  private def topicKey2 = TopicKey.of("default", "topic2")

  @Test
  def test(): Unit = {
    import oharastream.ohara.shabondi.ShabondiDefinitions._
    val jsonSourceTopicKeys = TopicKey.toJsonString(Seq(topicKey1, topicKey2).asJava)
    val args = Map(
      CLIENT_PORT_DEFINITION.key      -> "8080",
      SOURCE_TO_TOPICS_DEFINITION.key -> jsonSourceTopicKeys
    )
    val config = new SourceConfig(args)
    config.port should ===(8080)

    val topicKeys = Seq(TopicKey.of("default", "topic1"), TopicKey.of("default", "topic2"))

    config.sourceToTopics.size should ===(2)
    config.sourceToTopics(0) should ===(topicKeys(0))
    config.sourceToTopics(1) should ===(topicKeys(1))
  }
} 
Example 9
Source File: TestSourceRoute.scala    From ohara    with Apache License 2.0
package oharastream.ohara.shabondi.source

import java.util.concurrent.TimeUnit

import akka.http.scaladsl.model._
import akka.http.scaladsl.testkit.RouteTestTimeout
import oharastream.ohara.common.data.Row
import oharastream.ohara.kafka.Consumer
import oharastream.ohara.metrics.BeanChannel
import oharastream.ohara.metrics.basic.CounterMBean
import oharastream.ohara.shabondi.{BasicShabondiTest, KafkaSupport}
import org.junit.Test
import org.scalatest.matchers.should.Matchers._
import spray.json.DefaultJsonProtocol._
import spray.json._

import scala.concurrent.duration.Duration
import scala.jdk.CollectionConverters._
final class TestSourceRoute extends BasicShabondiTest {
  import oharastream.ohara.shabondi.ShabondiRouteTestSupport._

  // Extend the timeout to avoid the exception:
  // org.scalatest.exceptions.TestFailedException: Request was neither completed nor rejected within 1 second
  implicit val routeTestTimeout = RouteTestTimeout(Duration(5, TimeUnit.SECONDS))

  private val columnCount  = 6
  private val requestCount = 200

  private def sourceData: Map[String, Int] =
    (1 to columnCount).foldLeft(Map.empty[String, Int]) { (m, v) =>
      m + ("col-" + v -> v)
    }

  @Test
  def testInvalidRequest(): Unit = {
    val topicKey1 = createTopicKey
    val config    = defaultSourceConfig(Seq(topicKey1))
    val webServer = new WebServer(config)

    val request = Get("/")
    request ~> webServer.routes ~> check {
      response.status should ===(StatusCodes.MethodNotAllowed)
      contentType should ===(ContentTypes.`text/plain(UTF-8)`)
    }

    val request2 = Post("/")
    request2 ~> webServer.routes ~> check {
      response.status should ===(StatusCodes.BadRequest)
      contentType should ===(ContentTypes.`text/plain(UTF-8)`)
    }

    val jsonRow  = sourceData.toJson.compactPrint
    val entity   = HttpEntity(ContentTypes.`application/json`, jsonRow)
    val request3 = Post("/", entity)
    request3 ~> webServer.routes ~> check {
      response.status should ===(StatusCodes.OK)
      contentType should ===(ContentTypes.`text/plain(UTF-8)`)
    }
  }

  @Test
  def testSourceRoute(): Unit = {
    val topicKey1 = createTopicKey
    val config    = defaultSourceConfig(Seq(topicKey1))
    val webServer = new WebServer(config)
    try {
      (1 to requestCount).foreach { _ =>
        val jsonRow = sourceData.toJson.compactPrint
        val entity  = HttpEntity(ContentTypes.`application/json`, jsonRow)
        val request = Post(uri = "/", entity)

        request ~> webServer.routes ~> check {
          entityAs[String] should ===("OK")
        }
      }

      // assertion
      val rowsTopic1: Seq[Consumer.Record[Row, Array[Byte]]] =
        KafkaSupport.pollTopicOnce(brokerProps, topicKey1, 60, requestCount)
      rowsTopic1.size should ===(requestCount)
      rowsTopic1(0).key.get.cells.size should ===(columnCount)

      // assert metrics
      val beans = counterMBeans()
      beans.size should ===(1)
      beans(0).getValue should ===(requestCount)
    } finally {
      webServer.close()
      topicAdmin.deleteTopic(topicKey1)
    }
  }

  private def counterMBeans(): Seq[CounterMBean] = BeanChannel.local().counterMBeans().asScala.toSeq
} 
Example 10
Source File: TestKafkaClient.scala    From ohara    with Apache License 2.0
package oharastream.ohara.shabondi.common

import java.util.concurrent.TimeUnit

import akka.actor.ActorSystem
import oharastream.ohara.common.data.{Cell, Row}
import oharastream.ohara.common.setting.TopicKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.shabondi.{BasicShabondiTest, KafkaSupport}
import org.junit.{After, Before, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits._
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

final class TestKafkaClient extends BasicShabondiTest {
  import oharastream.ohara.shabondi.common.ConvertSupport._

  implicit lazy val system: ActorSystem = ActorSystem("shabondi-test")

  private[this] val topicKey = TopicKey.of("group", CommonUtils.randomString(5))

  @Before
  def before(): Unit = createTestTopic(topicKey)

  @After
  override def tearDown(): Unit =
    topicAdmin.deleteTopic(topicKey)

  @Test
  def testSingleProducer(): Unit = {
    val producer = KafkaSupport.newProducer(brokerProps)
    try {
      val row = Row.of(Cell.of("col1", 100))
      val sender = producer
        .sender()
        .key(row)
        .value(Array[Byte]())
        .topicKey(topicKey)

      val future = sender.send.toScala

      val metadata = Await.result(future, Duration(10, TimeUnit.SECONDS))

      metadata.topicKey should ===(topicKey)
      metadata.offset should ===(0)
      metadata.partition should ===(0)
    } finally {
      Releasable.close(producer)
    }
  }

  @Test
  def testConsumer(): Unit = {
    val producer = KafkaSupport.newProducer(brokerProps)
    try {
      Future.sequence {
        (1 to 9)
          .map(i => Row.of(Cell.of(s"col-$i", i * 10)))
          .map(row => producer.sender().key(row).value(Array[Byte]()).topicKey(topicKey))
          .map { sender =>
            sender.send.toScala
          }
      }

      val records = KafkaSupport.pollTopicOnce(brokerProps, topicKey, 10, 10)

      records.size should ===(9)
      records(0).topicKey shouldBe topicKey
      records(0).key.isPresent shouldBe true
      records(0).key.get shouldBe Row.of(Cell.of("col-1", 10))

      records(8).topicKey shouldBe topicKey
      records(8).key.isPresent shouldBe true
      records(8).key.get shouldBe Row.of(Cell.of("col-9", 90))
    } finally {
      Releasable.close(producer)
    }
  }
} 
Example 11
Source File: TestJsonSupport.scala    From ohara    with Apache License 2.0
package oharastream.ohara.shabondi.common

import oharastream.ohara.common.data.{Cell, Row}
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.CommonUtils
import org.junit.Test
import org.scalatest.matchers.should.Matchers._
import spray.json._

import scala.jdk.CollectionConverters._

final class TestJsonSupport extends OharaTest {
  @Test
  def testRowData(): Unit = {
    val jsonData =
      """
        |{"col1":"hello", "col2": 200}
        |""".stripMargin

    val rowData: JsonSupport.RowData = JsonSupport.rowDataFormat.read(jsonData.parseJson)

    rowData("col1") should ===(JsString("hello"))
    rowData("col2") should ===(JsNumber(200))

    val row = JsonSupport.toRow(rowData)

    row.cell(0) should ===(Cell.of("col1", "hello"))
    row.cell(1) should ===(Cell.of("col2", 200))
  }

  @Test
  def testConversion(): Unit = {
    val json =
      """
        |  {
        |    "a": "b",
        |    "b": 123,
        |    "c": false,
        |    "d": null,
        |    "e": [
        |      "a",
        |      "c"
        |    ],
        |    "f": [
        |      {
        |        "f0": "v",
        |        "f1": 123,
        |        "tags": []
        |      }
        |    ],
        |    "g": {
        |      "a": "c",
        |      "d": 123,
        |      "dd": true,
        |      "tags": []
        |    },
        |    "tags": []
        |  }
        |""".stripMargin.parseJson.asJsObject

    val row   = JsonSupport.toRow(json)
    val json2 = JsonSupport.toJson(row)
    JsObject(JsonSupport.noJsNull(json.fields)) shouldBe json2
  }

  @Test
  def testTags(): Unit = {
    val tags = Seq(CommonUtils.randomString(), CommonUtils.randomString())
    val row  = Row.of(tags.asJava, Cell.of("a", "b"))
    val json = JsonSupport.toJson(row)
    json.fields(JsonSupport.TAGS_KEY).asInstanceOf[JsArray].elements.map(_.asInstanceOf[JsString].value) shouldBe tags

    val row2 = JsonSupport.toRow(json)
    row2.tags().asScala shouldBe tags
  }

  @Test
  def testTimestamp(): Unit = {
    val key       = "a"
    val timestamp = new java.sql.Timestamp(System.currentTimeMillis())
    val row       = Row.of(Cell.of(key, timestamp))
    JsonSupport.toJson(row).fields(key).asInstanceOf[JsString].value shouldBe timestamp.toString
  }
} 
Example 12
Source File: TestVersionFile.scala    From ohara    with Apache License 2.0
package oharastream.ohara.it.script

import oharastream.ohara.common.util.{Releasable, VersionUtils}
import oharastream.ohara.it.{ContainerPlatform, IntegrationTest, ServiceKeyHolder}
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global

// NOTE: the class declaration was dropped in this listing; the line below is a reconstruction
// and an assumption, not verified against the original source. `serviceKeyHolder`,
// `containerClient` and `resourceRef` are assumed to be provided by the integration-test harness.
class TestVersionFile(platform: ContainerPlatform) extends IntegrationTest {
  @Test
  def testBroker(): Unit = testVersion(s"oharastream/broker:${VersionUtils.VERSION}", Set("ohara"))

  private[this] def testVersion(imageName: String, expectedStrings: Set[String]): Unit = platform.nodeNames.foreach {
    hostname =>
      val key           = serviceKeyHolder.generateClusterKey()
      val containerName = s"${key.group()}-${key.name()}"
      val versionString: String = result(
        containerClient.containerCreator
          .imageName(imageName)
          .command("-v")
          .name(containerName)
          .nodeName(hostname)
          .create()
          .flatMap(_ => containerClient.log(containerName).map(_.head._2))
      )
      expectedStrings.foreach(s => versionString should include(s))
  }

  @After
  def releaseConfigurator(): Unit = {
    Releasable.close(serviceKeyHolder)
    Releasable.close(resourceRef)
  }
} 
Example 13
Source File: TestPerformance4FtpSource.scala    From ohara    with Apache License 2.0
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.{ConnectorApi, TopicApi}
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.connector.ftp.FtpSource
import oharastream.ohara.it.category.PerformanceGroup
import oharastream.ohara.kafka.connector.csv.CsvConnectorDefinitions
import org.junit.Test
import org.junit.experimental.categories.Category
import spray.json.{JsNumber, JsString}

@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4FtpSource extends BasicTestPerformance4Ftp {
  @Test
  def test(): Unit = {
    val ftp = ftpClient()
    try {
      createTopic()
      val completedPath = "/completed"
      val errorPath     = "/error"
      val (path, _, _)  = setupInputData(timeoutOfInputData)
      try {
        loopInputDataThread(setupInputData)
        setupConnector(
          connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
          className = classOf[FtpSource].getName,
          settings = ftpSettings
            + (CsvConnectorDefinitions.INPUT_FOLDER_KEY -> JsString(path))
            + (CsvConnectorDefinitions.COMPLETED_FOLDER_KEY -> JsString(
              PerformanceTestingUtils.createFolder(ftp, completedPath)
            ))
            + (CsvConnectorDefinitions.ERROR_FOLDER_KEY -> JsString(
              PerformanceTestingUtils.createFolder(ftp, errorPath)
            ))
            + (CsvConnectorDefinitions.SIZE_OF_FILE_CACHE_KEY -> JsNumber(fileNameCacheSize))
        )
        sleepUntilEnd()
      } finally if (cleanupTestData) {
        PerformanceTestingUtils.deleteFolder(ftp, path)
        PerformanceTestingUtils.deleteFolder(ftp, completedPath)
        PerformanceTestingUtils.deleteFolder(ftp, errorPath)
      }
    } finally Releasable.close(ftp)
  }

  override protected def afterStoppingConnectors(
    connectorInfos: Seq[ConnectorApi.ConnectorInfo],
    topicInfos: Seq[TopicApi.TopicInfo]
  ): Unit = {}
} 
Example 14
Source File: TestPerformanceReport.scala    From ohara    with Apache License 2.0
package oharastream.ohara.it.performance

import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.CommonUtils
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

class TestPerformanceReport extends OharaTest {
  private[this] val groupName = "benchmark"

  @Test
  def testCleanValue(): Unit = {
    val headerName                        = "header1"
    val report: PerformanceReport.Builder = PerformanceReport.builder
    val record = report
      .connectorKey(ConnectorKey.of(groupName, CommonUtils.randomString(5)))
      .className("class")
      .record(1, headerName, 100)
      .build
    // Before clean value
    record.records.get(1).get(headerName) shouldBe 100

    // After clean value
    report
      .resetValue(1, headerName)
      .build
      .records
      .get(1)
      .get(headerName) shouldBe 0.0
  }

  @Test
  def testRecord1(): Unit = {
    val headerName                        = "header1"
    val report: PerformanceReport.Builder = PerformanceReport.builder
    report
      .connectorKey(ConnectorKey.of(groupName, CommonUtils.randomString(5)))
      .className("class")
      .record(1, headerName, 100)
      .record(1, headerName, 200)
      .record(1, headerName, 300)
      .build
      .records
      .get(1)
      .get(headerName) shouldBe 600
  }

  @Test
  def testRecord2(): Unit = {
    val headerName                        = "header1"
    val report: PerformanceReport.Builder = PerformanceReport.builder
    report
      .connectorKey(ConnectorKey.of(groupName, CommonUtils.randomString(5)))
      .className("class")
      .resetValue(1, headerName)
      .record(1, headerName, 100)
      .record(1, headerName, 200)
      .record(1, headerName, 300)
      .resetValue(1, headerName)
      .record(1, headerName, 100)
      .record(1, headerName, 200)
      .record(1, headerName, 300)
      .build
      .records
      .get(1)
      .get(headerName) shouldBe 600
  }
} 
Example 15
Source File: TestPerformance4SambaSource.scala    From ohara    with Apache License 2.0
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.{ConnectorApi, TopicApi}
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.CommonUtils
import oharastream.ohara.connector.smb.SmbSource
import oharastream.ohara.it.category.PerformanceGroup
import oharastream.ohara.kafka.connector.csv.CsvConnectorDefinitions
import org.junit.Test
import org.junit.experimental.categories.Category
import spray.json.{JsNumber, JsString}

@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4SambaSource extends BasicTestPerformance4Samba {
  @Test
  def test(): Unit = {
    val samba = sambaClient()
    createTopic()
    val completedPath = "completed"
    val errorPath     = "error"
    val (path, _, _)  = setupInputData(timeoutOfInputData)

    try {
      loopInputDataThread(setupInputData)
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[SmbSource].getName,
        settings = sambaSettings
          + (CsvConnectorDefinitions.INPUT_FOLDER_KEY -> JsString(path))
          + (CsvConnectorDefinitions.COMPLETED_FOLDER_KEY -> JsString(
            PerformanceTestingUtils.createFolder(samba, completedPath)
          ))
          + (CsvConnectorDefinitions.ERROR_FOLDER_KEY -> JsString(
            PerformanceTestingUtils.createFolder(samba, errorPath)
          ))
          + (CsvConnectorDefinitions.SIZE_OF_FILE_CACHE_KEY -> JsNumber(fileNameCacheSize))
      )
      sleepUntilEnd()
    } finally if (needDeleteData) {
      PerformanceTestingUtils.deleteFolder(samba, path)
      PerformanceTestingUtils.deleteFolder(samba, completedPath)
      PerformanceTestingUtils.deleteFolder(samba, errorPath)
    }
  }

  override protected def afterStoppingConnectors(
    connectorInfos: Seq[ConnectorApi.ConnectorInfo],
    topicInfos: Seq[TopicApi.TopicInfo]
  ): Unit = {}
} 
Example 16
Source File: TestPerformance4HdfsSink.scala    From ohara    with Apache License 2.0
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.ConnectorApi.ConnectorInfo
import oharastream.ohara.client.configurator.TopicApi.TopicInfo
import oharastream.ohara.client.filesystem.FileSystem
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.connector.hdfs.sink.HDFSSink
import oharastream.ohara.it.category.PerformanceGroup
import oharastream.ohara.kafka.connector.csv.CsvConnectorDefinitions
import org.junit.experimental.categories.Category
import spray.json.{JsNumber, JsString}
import org.junit.Test

@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4HdfsSink extends BasicTestPerformance {
  private[this] val NEED_DELETE_DATA_KEY: String = PerformanceTestingUtils.DATA_CLEANUP_KEY
  private[this] val needDeleteData: Boolean      = sys.env.getOrElse(NEED_DELETE_DATA_KEY, "true").toBoolean

  @Test
  def test(): Unit = {
    val hdfs = hdfsClient()
    try {
      createTopic()
      produce(timeoutOfInputData)
      loopInputDataThread(produce)
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[HDFSSink].getName(),
        settings = Map(
          CsvConnectorDefinitions.FLUSH_SIZE_KEY             -> JsNumber(numberOfCsvFileToFlush),
          oharastream.ohara.connector.hdfs.sink.HDFS_URL_KEY -> JsString(PerformanceTestingUtils.hdfsURL),
          oharastream.ohara.connector.hdfs.sink.OUTPUT_FOLDER_KEY -> JsString(
            PerformanceTestingUtils.createFolder(hdfs, PerformanceTestingUtils.dataDir)
          )
        )
      )
      sleepUntilEnd()
    } finally Releasable.close(hdfs)
  }

  override protected def afterStoppingConnectors(
    connectorInfos: Seq[ConnectorInfo],
    topicInfos: Seq[TopicInfo]
  ): Unit = {
    if (needDeleteData) {
      //Delete file from the HDFS
      val hdfs = hdfsClient()
      try topicInfos.foreach { topicInfo =>
        val path = s"${PerformanceTestingUtils.dataDir}/${topicInfo.topicNameOnKafka}"
        PerformanceTestingUtils.deleteFolder(hdfs, path)
      } finally Releasable.close(hdfs)
    }
  }

  private[this] def hdfsClient(): FileSystem = {
    FileSystem.hdfsBuilder.url(PerformanceTestingUtils.hdfsURL).build
  }
} 
Example 17
Source File: TestPerformance4JDBCSourceToHDFSSink.scala    From ohara    with Apache License 2.0
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.ConnectorApi.ConnectorInfo
import oharastream.ohara.client.configurator.TopicApi.TopicInfo
import oharastream.ohara.client.filesystem.FileSystem
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.connector.hdfs.sink.HDFSSink
import oharastream.ohara.connector.jdbc.source.JDBCSourceConnector
import oharastream.ohara.it.category.PerformanceGroup
import org.junit.experimental.categories.Category
import org.junit.Test
import spray.json.{JsNumber, JsString}

@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4JDBCSourceToHDFSSink extends BasicTestPerformance4Jdbc {
  override protected val tableName: String = s"TABLE${CommonUtils.randomString().toUpperCase()}"

  @Test
  def test(): Unit = {
    val hdfs = hdfsClient()
    try {
      createTable()
      setupInputData(timeoutOfInputData)
      loopInputDataThread(setupInputData)
      createTopic()

      //Running JDBC Source Connector
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[JDBCSourceConnector].getName(),
        settings = Map(
          oharastream.ohara.connector.jdbc.source.DB_URL                -> JsString(url),
          oharastream.ohara.connector.jdbc.source.DB_USERNAME           -> JsString(user),
          oharastream.ohara.connector.jdbc.source.DB_PASSWORD           -> JsString(password),
          oharastream.ohara.connector.jdbc.source.DB_TABLENAME          -> JsString(tableName),
          oharastream.ohara.connector.jdbc.source.TIMESTAMP_COLUMN_NAME -> JsString(timestampColumnName),
          oharastream.ohara.connector.jdbc.source.DB_SCHEMA_PATTERN     -> JsString(user),
          oharastream.ohara.connector.jdbc.source.JDBC_FETCHDATA_SIZE   -> JsNumber(10000),
          oharastream.ohara.connector.jdbc.source.JDBC_FLUSHDATA_SIZE   -> JsNumber(10000)
        )
      )

      //Running HDFS Sink Connector
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[HDFSSink].getName(),
        settings = Map(
          oharastream.ohara.connector.hdfs.sink.HDFS_URL_KEY   -> JsString(PerformanceTestingUtils.hdfsURL),
          oharastream.ohara.connector.hdfs.sink.FLUSH_SIZE_KEY -> JsNumber(numberOfCsvFileToFlush),
          oharastream.ohara.connector.hdfs.sink.OUTPUT_FOLDER_KEY -> JsString(
            PerformanceTestingUtils.createFolder(hdfs, PerformanceTestingUtils.dataDir)
          )
        )
      )
      sleepUntilEnd()
    } finally Releasable.close(hdfs)
  }

  override protected def afterStoppingConnectors(
    connectorInfos: Seq[ConnectorInfo],
    topicInfos: Seq[TopicInfo]
  ): Unit = {
    if (needDeleteData) {
      //Drop table for the database
      client.dropTable(tableName)

      //Delete file from the HDFS
      val hdfs = hdfsClient()
      try {
        topicInfos.foreach { topicInfo =>
          val path = s"${PerformanceTestingUtils.dataDir}/${topicInfo.topicNameOnKafka}"
          PerformanceTestingUtils.deleteFolder(hdfs, path)
        }
      } finally Releasable.close(hdfs)
    }
  }

  private[this] def hdfsClient(): FileSystem = {
    FileSystem.hdfsBuilder.url(PerformanceTestingUtils.hdfsURL).build
  }
} 
Example 18
Source File: TestPerformance4FtpSink.scala    From ohara    with Apache License 2.0
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.ConnectorApi.ConnectorInfo
import oharastream.ohara.client.configurator.TopicApi.TopicInfo
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.connector.ftp.FtpSink
import oharastream.ohara.it.category.PerformanceGroup
import oharastream.ohara.kafka.connector.csv.CsvConnectorDefinitions
import spray.json.{JsNumber, JsString}
import org.junit.Test
import org.junit.experimental.categories.Category

@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4FtpSink extends BasicTestPerformance4Ftp {
  private[this] val dataDir: String = "/tmp"

  @Test
  def test(): Unit = {
    val ftp = ftpClient()
    try {
      createTopic()
      produce(timeoutOfInputData)
      loopInputDataThread(produce)
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[FtpSink].getName(),
        settings = ftpSettings
          ++ Map(
            CsvConnectorDefinitions.OUTPUT_FOLDER_KEY -> JsString(PerformanceTestingUtils.createFolder(ftp, dataDir)),
            CsvConnectorDefinitions.FLUSH_SIZE_KEY    -> JsNumber(numberOfCsvFileToFlush)
          )
      )
      sleepUntilEnd()
    } finally Releasable.close(ftp)
  }

  override protected def afterStoppingConnectors(connectorInfos: Seq[ConnectorInfo], topicInfos: Seq[TopicInfo]): Unit =
    if (cleanupTestData)
      topicInfos.foreach { topicInfo =>
        val path = s"${dataDir}/${topicInfo.topicNameOnKafka}"
        val ftp  = ftpClient()
        try if (PerformanceTestingUtils.exists(ftp, path)) PerformanceTestingUtils.deleteFolder(ftp, path)
        finally Releasable.close(ftp)
      }
} 
Example 19
Source File: TestPerformance4FtpSourceToHDFSSink.scala    From ohara    with Apache License 2.0
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.ConnectorApi.ConnectorInfo
import oharastream.ohara.client.configurator.TopicApi.TopicInfo
import oharastream.ohara.client.filesystem.FileSystem
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.connector.ftp.FtpSource
import oharastream.ohara.connector.hdfs.sink.HDFSSink
import oharastream.ohara.it.category.PerformanceGroup
import oharastream.ohara.kafka.connector.csv.CsvConnectorDefinitions
import org.junit.Test
import org.junit.experimental.categories.Category
import spray.json.{JsNumber, JsString}

@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4FtpSourceToHDFSSink extends BasicTestPerformance4Ftp {
  private[this] val ftpCompletedPath = "/completed"
  private[this] val ftpErrorPath     = "/error"
  private[this] val (path, _, _)     = setupInputData(timeoutOfInputData)

  @Test
  def test(): Unit = {
    val ftp  = ftpClient()
    val hdfs = hdfsClient()
    try {
      createTopic()
      loopInputDataThread(setupInputData)
      //Running FTP Source Connector
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[FtpSource].getName,
        settings = ftpSettings
          + (CsvConnectorDefinitions.INPUT_FOLDER_KEY -> JsString(path))
          + (CsvConnectorDefinitions.COMPLETED_FOLDER_KEY -> JsString(
            PerformanceTestingUtils.createFolder(ftp, ftpCompletedPath)
          ))
          + (CsvConnectorDefinitions.ERROR_FOLDER_KEY -> JsString(
            PerformanceTestingUtils.createFolder(ftp, ftpErrorPath)
          ))
      )

      //Running HDFS Sink Connector
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[HDFSSink].getName(),
        settings = Map(
          oharastream.ohara.connector.hdfs.sink.HDFS_URL_KEY   -> JsString(PerformanceTestingUtils.hdfsURL),
          oharastream.ohara.connector.hdfs.sink.FLUSH_SIZE_KEY -> JsNumber(numberOfCsvFileToFlush),
          oharastream.ohara.connector.hdfs.sink.OUTPUT_FOLDER_KEY -> JsString(
            PerformanceTestingUtils.createFolder(hdfs, PerformanceTestingUtils.dataDir)
          )
        )
      )
      sleepUntilEnd()
    } finally {
      Releasable.close(hdfs)
      Releasable.close(ftp)
    }
  }

  override protected def afterStoppingConnectors(
    connectorInfos: Seq[ConnectorInfo],
    topicInfos: Seq[TopicInfo]
  ): Unit = {
    if (cleanupTestData) {
      //Delete file for the FTP
      val ftp  = ftpClient()
      val hdfs = hdfsClient()
      try {
        PerformanceTestingUtils.deleteFolder(ftp, path)
        PerformanceTestingUtils.deleteFolder(ftp, ftpCompletedPath)
        PerformanceTestingUtils.deleteFolder(ftp, ftpErrorPath)

        //Delete file from the HDFS
        topicInfos.foreach { topicInfo =>
          val path = s"${PerformanceTestingUtils.dataDir}/${topicInfo.topicNameOnKafka}"
          PerformanceTestingUtils.deleteFolder(hdfs, path)
        }
      } finally {
        Releasable.close(hdfs)
        Releasable.close(ftp)
      }
    }
  }

  private[this] def hdfsClient(): FileSystem = {
    FileSystem.hdfsBuilder.url(PerformanceTestingUtils.hdfsURL).build
  }
} 
Example 20
Source File: TestPerformance4Oracle.scala    From ohara    with Apache License 2.0
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.{ConnectorApi, TopicApi}
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.CommonUtils
import oharastream.ohara.connector.jdbc.source.JDBCSourceConnector
import oharastream.ohara.it.category.PerformanceGroup
import org.junit.Test
import org.junit.experimental.categories.Category
import spray.json.{JsNumber, JsString}

@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4Oracle extends BasicTestPerformance4Jdbc {
  override protected val tableName: String =
    s"TABLE${CommonUtils.randomString().toUpperCase()}"

  @Test
  def test(): Unit = {
    createTable()
    setupInputData(timeoutOfInputData)
    loopInputDataThread(setupInputData)
    createTopic()
    try {
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[JDBCSourceConnector].getName(),
        settings = Map(
          oharastream.ohara.connector.jdbc.source.DB_URL                -> JsString(url),
          oharastream.ohara.connector.jdbc.source.DB_USERNAME           -> JsString(user),
          oharastream.ohara.connector.jdbc.source.DB_PASSWORD           -> JsString(password),
          oharastream.ohara.connector.jdbc.source.DB_TABLENAME          -> JsString(tableName),
          oharastream.ohara.connector.jdbc.source.TIMESTAMP_COLUMN_NAME -> JsString(timestampColumnName),
          oharastream.ohara.connector.jdbc.source.DB_SCHEMA_PATTERN     -> JsString(user),
          oharastream.ohara.connector.jdbc.source.JDBC_FETCHDATA_SIZE   -> JsNumber(10000),
          oharastream.ohara.connector.jdbc.source.JDBC_FLUSHDATA_SIZE   -> JsNumber(10000)
        )
      )
      sleepUntilEnd()
    } finally if (needDeleteData) client.dropTable(tableName)
  }

  override protected def afterStoppingConnectors(
    connectorInfos: Seq[ConnectorApi.ConnectorInfo],
    topicInfos: Seq[TopicApi.TopicInfo]
  ): Unit = {}
} 
Example 21
Source File: TestPerformance4SambaSink.scala    From ohara    with Apache License 2.0
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.ConnectorApi.ConnectorInfo
import oharastream.ohara.client.configurator.TopicApi.TopicInfo
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.connector.smb.SmbSink
import oharastream.ohara.it.category.PerformanceGroup
import oharastream.ohara.kafka.connector.csv.CsvConnectorDefinitions
import org.junit.Test
import org.junit.experimental.categories.Category
import spray.json.{JsNumber, JsString}

@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4SambaSink extends BasicTestPerformance4Samba {
  private[this] val outputDir: String = "output"

  @Test
  def test(): Unit = {
    val samba = sambaClient()
    try {
      createTopic()
      produce(timeoutOfInputData)
      loopInputDataThread(produce)
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[SmbSink].getName,
        settings = sambaSettings
          ++ Map(
            CsvConnectorDefinitions.OUTPUT_FOLDER_KEY -> JsString(
              PerformanceTestingUtils.createFolder(samba, outputDir)
            ),
            CsvConnectorDefinitions.FLUSH_SIZE_KEY -> JsNumber(numberOfCsvFileToFlush)
          )
      )
      sleepUntilEnd()
    } finally Releasable.close(samba)
  }

  override protected def afterStoppingConnectors(connectorInfos: Seq[ConnectorInfo], topicInfos: Seq[TopicInfo]): Unit =
    if (needDeleteData)
      topicInfos.foreach { topicInfo =>
        val path  = s"$outputDir/${topicInfo.topicNameOnKafka}"
        val samba = sambaClient()
        try {
          if (PerformanceTestingUtils.exists(samba, path)) PerformanceTestingUtils.deleteFolder(samba, path)
        } finally Releasable.close(samba)
      }
} 
Example 22
Source File: TestPerformance4PerfSource.scala    From ohara    with Apache License 2.0
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.{ConnectorApi, TopicApi}
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.CommonUtils
import oharastream.ohara.connector.perf.PerfSource
import oharastream.ohara.it.category.PerformanceGroup
import org.junit.Test
import org.junit.experimental.categories.Category

@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4PerfSource extends BasicTestPerformance {
  @Test
  def test(): Unit = {
    createTopic()
    setupConnector(
      connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
      className = classOf[PerfSource].getName,
      settings = Map.empty
    )
    sleepUntilEnd()
  }

  override protected def afterStoppingConnectors(
    connectorInfos: Seq[ConnectorApi.ConnectorInfo],
    topicInfos: Seq[TopicApi.TopicInfo]
  ): Unit = {}
} 
Example 23
Source File: TestSettingDefinitionsOfDumbConnector.scala    From ohara    with Apache License 2.0
package oharastream.ohara.it.code

import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.SettingDef
import oharastream.ohara.it.connector.{IncludeAllTypesSinkConnector, IncludeAllTypesSourceConnector}
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

import scala.jdk.CollectionConverters._


class TestSettingDefinitionsOfDumbConnector extends OharaTest {
  @Test
  def allTypesShouldBeIncludedByDumbSource(): Unit =
    verify((new IncludeAllTypesSourceConnector).settingDefinitions().values().asScala.toSeq)

  @Test
  def allTypesShouldBeIncludedByDumbSink(): Unit =
    verify((new IncludeAllTypesSinkConnector).settingDefinitions().values().asScala.toSeq)

  private[this] def verify(settingDefs: Seq[SettingDef]): Unit =
    SettingDef.Type.values().foreach(t => settingDefs.map(_.valueType()).toSet should contain(t))
} 
Example 24
Source File: ClassUtils.scala    From ohara    with Apache License 2.0
package oharastream.ohara.it.code

import java.io.FileInputStream
import java.lang.reflect.Modifier
import java.util.jar.JarInputStream
import java.util.regex.Pattern

import org.junit.Test

import scala.jdk.CollectionConverters._

private[code] object ClassUtils {
  
  def classesInProductionScope(): Seq[Class[_]] = allClasses(n => !n.contains("tests.jar"))

  def allClasses(fileNameFilter: String => Boolean): Seq[Class[_]] = {
    val classLoader = ClassLoader.getSystemClassLoader
    val path        = "oharastream/ohara"
    val pattern     = Pattern.compile("^file:(.+\\.jar)!/" + path + "$")
    val urls        = classLoader.getResources(path)
    urls.asScala
      .map(url => pattern.matcher(url.getFile))
      .filter(_.find())
      .map(_.group(1))
      .filter(fileNameFilter)
      .flatMap { f =>
        val jarInput = new JarInputStream(new FileInputStream(f))
        try Iterator
          .continually(jarInput.getNextJarEntry)
          .takeWhile(_ != null)
          .map(_.getName)
          .toArray
          .filter(_.endsWith(".class"))
          .map(_.replace('/', '.'))
          .map(className => className.substring(0, className.length - ".class".length))
          .map(Class.forName)
        finally jarInput.close()
      }
      .toSeq
  }
} 
Example 25
Source File: TestQueryConfiguratorLog.scala    From ohara    with Apache License 2.0
package oharastream.ohara.it.client

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.LogApi
import oharastream.ohara.it.{ContainerPlatform, WithRemoteConfigurator}
import oharastream.ohara.it.category.ClientGroup
import org.junit.Test
import org.junit.experimental.categories.Category
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global

@Category(Array(classOf[ClientGroup]))
class TestQueryConfiguratorLog(platform: ContainerPlatform)
    extends WithRemoteConfigurator(platform: ContainerPlatform) {
  @Test
  def test(): Unit = {
    val log = result(LogApi.access.hostname(configuratorHostname).port(configuratorPort).log4Configurator())
    log.logs.size shouldBe 1
    log.logs.head.hostname.length should not be 0
    log.logs.head.value.length should not be 0

    val logOf1Second = result(LogApi.access.hostname(configuratorHostname).port(configuratorPort).log4Configurator(1)).logs.head.value
    TimeUnit.SECONDS.sleep(6)
    val logOf6Second = result(LogApi.access.hostname(configuratorHostname).port(configuratorPort).log4Configurator(6)).logs.head.value
    withClue(s"logOf1Second:$logOf1Second\nlogOf6Second:$logOf6Second") {
      // the log length depends on timing, so equality ("=") is also legal here :)
      logOf1Second.length should be <= logOf6Second.length
    }
  }
} 
Example 26
Source File: TestListNode.scala    From ohara    with Apache License 2.0
package oharastream.ohara.it.client

import oharastream.ohara.client.configurator.NodeApi
import oharastream.ohara.it.category.ClientGroup
import oharastream.ohara.it.{ContainerPlatform, WithRemoteConfigurator}
import org.junit.Test
import org.junit.experimental.categories.Category
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global

@Category(Array(classOf[ClientGroup]))
class TestListNode(platform: ContainerPlatform) extends WithRemoteConfigurator(platform: ContainerPlatform) {
  @Test
  def test(): Unit = {
    val services =
      result(NodeApi.access.hostname(configuratorHostname).port(configuratorPort).list()).flatMap(_.services)
    services should not be Seq.empty
    services.find(_.name == NodeApi.CONFIGURATOR_SERVICE_NAME) should not be None
  }
} 
Example 27
Source File: TestNodeResource.scala    From ohara    with Apache License 2.0
package oharastream.ohara.it.client

import oharastream.ohara.client.configurator.NodeApi
import oharastream.ohara.it.category.ClientGroup
import oharastream.ohara.it.{ContainerPlatform, WithRemoteConfigurator}
import org.junit.Test
import org.junit.experimental.categories.Category
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global

@Category(Array(classOf[ClientGroup]))
class TestNodeResource(platform: ContainerPlatform) extends WithRemoteConfigurator(platform: ContainerPlatform) {
  private[this] def nodeApi: NodeApi.Access = NodeApi.access.hostname(configuratorHostname).port(configuratorPort)

  @Test
  def testResources(): Unit = {
    val nodes = result(nodeApi.list())
    nodes should not be Seq.empty
    nodes.foreach { node =>
      nodes.exists(_.hostname == node.hostname) shouldBe true
      node.resources should not be Seq.empty
      node.resources.size should be >= 1
      node.resources.foreach { resource =>
        resource.value.toInt should be >= 1
        resource.name.isEmpty shouldBe false
        resource.unit.isEmpty shouldBe false
      }
    }
  }

  @Test
  def testStatus(): Unit = {
    val nodes = result(nodeApi.list())
    nodes should not be Seq.empty
    nodes.foreach { node =>
      nodes.exists(_.hostname == node.hostname) shouldBe true
      node.state shouldBe NodeApi.State.AVAILABLE
      node.error shouldBe None
    }
  }
} 
Example 28
Source File: TestVerifyNode.scala    From ohara    with Apache License 2.0
package oharastream.ohara.agent.docker

import java.util
import java.util.concurrent.TimeUnit

import oharastream.ohara.agent.{DataCollie, ServiceCollie}
import oharastream.ohara.client.configurator.NodeApi.{Node, State}
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.testing.service.SshdServer
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration


class TestVerifyNode extends OharaTest {
  private[this] var errorMessage: String = _
  private[this] val sshServer = SshdServer.local(
    0,
    java.util.Map.of(
      "docker info --format '{{json .}}'",
      (_: String) =>
        if (errorMessage != null)
          throw new IllegalArgumentException(errorMessage)
        else util.List.of("""
                        |  {
                        |    "NCPU": 1,
                        |    "MemTotal": 1024
                        |  }
                        |""".stripMargin)
    )
  )

  private[this] val node = Node(
    hostname = sshServer.hostname(),
    port = sshServer.port(),
    user = sshServer.user(),
    password = sshServer.password(),
    services = Seq.empty,
    state = State.AVAILABLE,
    error = None,
    lastModified = CommonUtils.current(),
    resources = Seq.empty,
    tags = Map.empty
  )

  private[this] val collie = ServiceCollie.dockerModeBuilder.dataCollie(DataCollie(Seq(node))).build

  @Test
  def happyCase(): Unit = Await.result(collie.verifyNode(node), Duration(30, TimeUnit.SECONDS))

  @Test
  def badCase(): Unit = {
    errorMessage = CommonUtils.randomString()
    intercept[Exception] {
      Await.result(collie.verifyNode(node), Duration(30, TimeUnit.SECONDS))
    }.getMessage should include("unavailable")
  }

  @After
  def tearDown(): Unit = {
    Releasable.close(collie)
    Releasable.close(sshServer)
  }
} 
Example 29
Source File: TestContainerCreator.scala    From ohara    with Apache License 2.0
package oharastream.ohara.agent.docker

import java.util.Objects

import oharastream.ohara.client.configurator.VolumeApi.Volume
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.CommonUtils
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.{ExecutionContext, Future}

class TestContainerCreator extends OharaTest {
  private[this] def fake(): DockerClient.ContainerCreator =
    (
      nodeName: String,
      hostname: String,
      imageName: String,
      volumeMaps: Map[Volume, String],
      name: String,
      command: Option[String],
      arguments: Seq[String],
      ports: Map[Int, Int],
      envs: Map[String, String],
      routes: Map[String, String],
      _: ExecutionContext
    ) =>
      Future.successful {
        // we check only the required arguments
        CommonUtils.requireNonEmpty(nodeName)
        CommonUtils.requireNonEmpty(hostname)
        CommonUtils.requireNonEmpty(imageName)
        CommonUtils.requireNonEmpty(name)
        Objects.requireNonNull(command)
        Objects.requireNonNull(ports)
        Objects.requireNonNull(envs)
        Objects.requireNonNull(routes)
        Objects.requireNonNull(arguments)
        Objects.requireNonNull(volumeMaps)
      }

  @Test
  def nullHostname(): Unit = an[NullPointerException] should be thrownBy fake().hostname(null)

  @Test
  def emptyHostname(): Unit = an[IllegalArgumentException] should be thrownBy fake().hostname("")

  @Test
  def nullImageName(): Unit = an[NullPointerException] should be thrownBy fake().imageName(null)

  @Test
  def emptyImageName(): Unit = an[IllegalArgumentException] should be thrownBy fake().imageName("")

  @Test
  def nullName(): Unit = an[NullPointerException] should be thrownBy fake().name(null)

  @Test
  def emptyName(): Unit = an[IllegalArgumentException] should be thrownBy fake().name("")

  @Test
  def nullCommand(): Unit = an[NullPointerException] should be thrownBy fake().command(null)

  @Test
  def emptyCommand(): Unit = fake().command("")

  @Test
  def nullPorts(): Unit = an[NullPointerException] should be thrownBy fake().portMappings(null)

  @Test
  def emptyPorts(): Unit = fake().portMappings(Map.empty)

  @Test
  def nullEnvs(): Unit = an[NullPointerException] should be thrownBy fake().envs(null)

  @Test
  def emptyEnvs(): Unit = fake().envs(Map.empty)

  @Test
  def nullRoute(): Unit = an[NullPointerException] should be thrownBy fake().routes(null)

  @Test
  def emptyRoute(): Unit = fake().routes(Map.empty)

  @Test
  def nullArguments(): Unit = an[NullPointerException] should be thrownBy fake().arguments(null)

  @Test
  def emptyArguments(): Unit = fake().arguments(Seq.empty)

  @Test
  def testExecuteWithoutRequiredArguments(): Unit =
    // none of the required arguments (not even imageName) is assigned, so create() fails
    an[NullPointerException] should be thrownBy fake().create()
} 
Example 30
Source File: TestK8SClientVerify.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.agent.k8s

import java.util.concurrent.TimeUnit

import oharastream.ohara.agent.fake.FakeK8SClient
import oharastream.ohara.agent.{DataCollie, ServiceCollie}
import oharastream.ohara.client.configurator.NodeApi.Node
import oharastream.ohara.common.rule.OharaTest
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration

class TestK8SClientVerify extends OharaTest {
  private[this] val dataCollie: DataCollie = DataCollie(Seq.empty)

  private[this] def node: Node = Node("ohara", "user", "password")

  @Test
  def testMockK8sClientVerifyNode1(): Unit = {
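    // the fake client reports the node container as running, so verification succeeds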
    val fakeK8SClient = new FakeK8SClient(true, Option(K8SStatusInfo(true, "")), "")
    val serviceCollie: ServiceCollie =
      ServiceCollie.k8sModeBuilder.dataCollie(dataCollie).k8sClient(fakeK8SClient).build()
    Await.result(
      serviceCollie.verifyNode(node),
      Duration(30, TimeUnit.SECONDS)
    ) shouldBe "ohara node is running."
  }

  @Test
  def testMockK8sClientVerifyNode2(): Unit = {
    val fakeK8SClient = new FakeK8SClient(true, Option(K8SStatusInfo(false, "node failed.")), "")
    val serviceCollie: ServiceCollie =
      ServiceCollie.k8sModeBuilder.dataCollie(dataCollie).k8sClient(fakeK8SClient).build()
    intercept[IllegalStateException] {
      Await.result(
        serviceCollie.verifyNode(node),
        Duration(30, TimeUnit.SECONDS)
      )
    }.getMessage shouldBe "ohara node doesn't running container. cause: node failed."
  }

  @Test
  def testMockK8sClientVerifyNode3(): Unit = {
    val fakeK8SClient = new FakeK8SClient(false, Option(K8SStatusInfo(false, "failed")), "")
    val serviceCollie: ServiceCollie =
      ServiceCollie.k8sModeBuilder.dataCollie(dataCollie).k8sClient(fakeK8SClient).build()
    intercept[IllegalStateException] {
      Await.result(
        serviceCollie.verifyNode(node),
        Duration(30, TimeUnit.SECONDS)
      )
    }.getMessage shouldBe "ohara node doesn't running container. cause: failed"
  }

  @Test
  def testMockK8SClientVerifyNode4(): Unit = {
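    // no K8S status info exists for the node, so verification fails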
    val fakeK8SClient = new FakeK8SClient(false, None, "")
    val serviceCollie: ServiceCollie =
      ServiceCollie.k8sModeBuilder.dataCollie(dataCollie).k8sClient(fakeK8SClient).build()
    intercept[IllegalStateException] {
      Await.result(
        serviceCollie.verifyNode(node),
        Duration(30, TimeUnit.SECONDS)
      )
    }.getMessage shouldBe "ohara node doesn't running container. cause: ohara node doesn't exists."
  }
} 
Example 31
Source File: TestK8SServiceCollieImpl.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.agent.k8s

import java.util.concurrent.TimeUnit

import oharastream.ohara.agent.DataCollie
import oharastream.ohara.agent.fake.FakeK8SClient
import oharastream.ohara.client.configurator.NodeApi
import oharastream.ohara.client.configurator.NodeApi.{Node, Resource}
import oharastream.ohara.common.rule.OharaTest
import org.junit.Test

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Await, ExecutionContext, Future}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.duration.Duration

class TestK8SServiceCollieImpl extends OharaTest {
  @Test
  def testResource(): Unit = {
    val nodeCache  = (1 to 3).map(x => Node(s"node$x", "user", "password"))
    val dataCollie = DataCollie(nodeCache)

    val k8sClient = new FakeK8SClient(false, None, "container1") {
      override def resources()(
        implicit executionContext: ExecutionContext
      ): Future[Map[String, Seq[NodeApi.Resource]]] =
        Future.successful {
          Map(
            "node1" -> Seq(Resource.cpu(8, Option(2.0)), Resource.memory(1024 * 1024 * 1024 * 100, Option(5.0))),
            "node2" -> Seq(Resource.cpu(8, Option(1.0)), Resource.memory(1024 * 1024 * 1024 * 100, Option(5.0))),
            "node3" -> Seq(Resource.cpu(8, Option(3.0)), Resource.memory(1024 * 1024 * 1024 * 100, Option(5.0)))
          )
        }
    }

    val k8sServiceCollieImpl = new K8SServiceCollieImpl(dataCollie, k8sClient)
    val resource             = result(k8sServiceCollieImpl.resources())
    resource.size shouldBe 3
    val nodeNames = resource.keys.toSeq
    nodeNames(0) shouldBe "node1"
    nodeNames(1) shouldBe "node2"
    nodeNames(2) shouldBe "node3"

    val node1Resource: Seq[Resource] =
      resource.filter(x => x._1 == "node1").flatMap(x => x._2).toSeq

    node1Resource(0).name shouldBe "CPU"
    node1Resource(0).unit shouldBe "cores"
    node1Resource(0).used.get shouldBe 2.0
    node1Resource(0).value shouldBe 8

    node1Resource(1).name shouldBe "Memory"
    node1Resource(1).unit shouldBe "bytes"
    node1Resource(1).used.get shouldBe 5.0
    node1Resource(1).value shouldBe 100L * 1024 * 1024 * 1024
  }

  @Test
  def testEmptyResource(): Unit = {
    val nodeCache  = (1 to 3).map(x => Node(s"node$x", "user", "password"))
    val dataCollie = DataCollie(nodeCache)

    val k8sClient = new FakeK8SClient(false, None, "container1") {
      override def resources()(
        implicit executionContext: ExecutionContext
      ): Future[Map[String, Seq[NodeApi.Resource]]] =
        Future.successful(Map.empty)
    }

    val k8sServiceCollieImpl = new K8SServiceCollieImpl(dataCollie, k8sClient)
    val resource             = result(k8sServiceCollieImpl.resources())
    resource.size shouldBe 0
  }

  private[this] def result[T](future: Future[T]): T = Await.result(future, Duration(10, TimeUnit.SECONDS))
} 
Example 32
Source File: TestAgent.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.agent

import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.Releasable
import oharastream.ohara.testing.service.SshdServer
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._
import scala.jdk.CollectionConverters._

class TestAgent extends OharaTest {
  private[this] val server = SshdServer.local(
    0,
    java.util.Map.of(
      "hello",
      (_: String) => java.util.List.of("world"),
      "oharastream",
      (_: String) => java.util.List.of("ohara")
    )
  )

  @Test
  def testJavaVersion(): Unit = {
    val agent =
      Agent.builder.hostname(server.hostname).port(server.port).user(server.user).password(server.password).build
    try {
      val result = agent.execute("java -version").get
      result.toLowerCase should include("jdk")
    } finally agent.close()
  }

  @Test
  def testCustomCommand(): Unit = {
    def assertResponse(request: String, response: java.util.List[String]): Unit = {
      val agent =
        Agent.builder.hostname(server.hostname).port(server.port).user(server.user).password(server.password).build
      try agent.execute(request).get.split("\n").toSeq shouldBe response.asScala.toSeq
      finally agent.close()
    }
    assertResponse("hello", java.util.List.of("world"))
    assertResponse("oharastream", java.util.List.of("ohara"))
  }

  @Test
  def nullHostname(): Unit = an[NullPointerException] should be thrownBy Agent.builder.hostname(null)

  @Test
  def emptyHostname(): Unit = an[IllegalArgumentException] should be thrownBy Agent.builder.hostname("")

  @Test
  def negativePort(): Unit = {
    an[IllegalArgumentException] should be thrownBy Agent.builder.port(0)
    an[IllegalArgumentException] should be thrownBy Agent.builder.port(-1)
  }

  @Test
  def nullUser(): Unit = an[NullPointerException] should be thrownBy Agent.builder.user(null)

  @Test
  def emptyUser(): Unit = an[IllegalArgumentException] should be thrownBy Agent.builder.user("")

  @Test
  def nullPassword(): Unit = an[NullPointerException] should be thrownBy Agent.builder.password(null)

  @Test
  def emptyPassword(): Unit = an[IllegalArgumentException] should be thrownBy Agent.builder.password("")

  @Test
  def nullTimeout(): Unit = an[NullPointerException] should be thrownBy Agent.builder.timeout(null)

  @Test
  def nullCharset(): Unit = an[NullPointerException] should be thrownBy Agent.builder.charset(null)

  @After
  def tearDown(): Unit = Releasable.close(server)
} 
Example 33
Source File: TestRemoteFolderHandler.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.agent

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.NodeApi.Node
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.testing.service.SshdServer
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

class TestRemoteFolderHandler extends OharaTest {
  private[this] val server   = SshdServer.local(0)
  private[this] val hostname = server.hostname()
  private[this] val dataCollie = DataCollie(
    Seq(
      Node(
        hostname = hostname,
        port = server.port(),
        user = server.user(),
        password = server.password()
      )
    )
  )
  private[this] val folderHandler = RemoteFolderHandler.builder().dataCollie(dataCollie).build()

  @Test
  def testFolderNotExists(): Unit =
    result(folderHandler.exist(server.hostname(), "/home/ohara100")) shouldBe false

  @Test
  def testCreateFolderAndDelete(): Unit = {
    val path = s"/tmp/${CommonUtils.randomString(5)}"
    result(folderHandler.create(hostname, path)) shouldBe true
    result(folderHandler.exist(hostname, path)) shouldBe true
    // the folder already exists, so create() does nothing and returns false
    result(folderHandler.create(hostname, path)) shouldBe false
    result(folderHandler.delete(hostname, path)) shouldBe true
    result(folderHandler.delete(hostname, path)) shouldBe false
  }

  @Test
  def testListFolder(): Unit = {
    result(folderHandler.list(hostname, "/tmp")).size should not be 0
    val path = s"/tmp/${CommonUtils.randomString(5)}"
    result(folderHandler.create(hostname, path)) shouldBe true
    result(folderHandler.list(hostname, "/tmp")) should contain(path)
  }

  @Test
  def testInspectFolder(): Unit = {
    val folderInfo = result(folderHandler.inspect(hostname, "/tmp"))
    folderInfo.name shouldBe "tmp"
    folderInfo.permission shouldBe FolderPermission.READWRITE
    folderInfo.size should be > 0L
    folderInfo.uid should be >= 0
  }

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(120, TimeUnit.SECONDS))

  @After
  def tearDown(): Unit = Releasable.close(server)
} 
Example 34
Source File: TestConfiguratorMain.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator
import java.util.concurrent.{Executors, TimeUnit}
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.configurator.Configurator.Mode
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._
import scala.concurrent.{ExecutionContext, Future}

class TestConfiguratorMain extends OharaTest {
  @Test
  def illegalK8sUrl(): Unit =
    intercept[IllegalArgumentException] {
      Configurator.main(Array[String](Configurator.K8S_KEY, s"http://localhost:${CommonUtils.availablePort()}"))
    }.getMessage should include("unable to access")

  @Test
  def emptyK8sArgument(): Unit =
    an[IllegalArgumentException] should be thrownBy Configurator.main(Array[String](Configurator.K8S_KEY, ""))

  @Test
  def nullK8sArgument(): Unit =
    an[IllegalArgumentException] should be thrownBy Configurator.main(Array[String](Configurator.K8S_KEY))

  @Test
  def fakeWithK8s(): Unit =
    an[IllegalArgumentException] should be thrownBy Configurator.main(
      Array[String](Configurator.K8S_KEY, "http://localhost", Configurator.FAKE_KEY, "true")
    )

  @Test
  def k8sWithFake(): Unit =
    an[IllegalArgumentException] should be thrownBy Configurator.main(
      Array[String](Configurator.FAKE_KEY, "true", Configurator.K8S_KEY, "http://localhost")
    )

  @Test
  def testFakeMode(): Unit =
    runMain(
      Array[String](Configurator.HOSTNAME_KEY, "localhost", Configurator.PORT_KEY, "0", Configurator.FAKE_KEY, "true"),
      configurator => configurator.mode shouldBe Mode.FAKE
    )

  @Test
  def testDockerMode(): Unit =
    runMain(
      Array[String](Configurator.HOSTNAME_KEY, "localhost", Configurator.PORT_KEY, "0"),
      configurator => configurator.mode shouldBe Mode.DOCKER
    )

  private[this] def runMain(args: Array[String], action: Configurator => Unit): Unit = {
    Configurator.GLOBAL_CONFIGURATOR_SHOULD_CLOSE = false
    val service = ExecutionContext.fromExecutorService(Executors.newSingleThreadExecutor())
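    // Configurator.main blocks until shutdown, so run it on a dedicated thread and wait for the running flag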
    Future[Unit](Configurator.main(args))(service)
    import java.time.Duration
    try {
      CommonUtils.await(() => Configurator.GLOBAL_CONFIGURATOR_RUNNING, Duration.ofSeconds(30))
      action(Configurator.GLOBAL_CONFIGURATOR)
    } finally {
      Configurator.GLOBAL_CONFIGURATOR_SHOULD_CLOSE = true
      service.shutdownNow()
      service.awaitTermination(60, TimeUnit.SECONDS)
    }
  }

  @After
  def tearDown(): Unit = {
    Configurator.GLOBAL_CONFIGURATOR_SHOULD_CLOSE = false
    Releasable.close(Configurator.GLOBAL_CONFIGURATOR)
    Configurator.GLOBAL_CONFIGURATOR = null
  }
} 
Example 35
Source File: TestDefaultValuesAutoComplete.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator

import java.util.concurrent.{ConcurrentHashMap, TimeUnit}

import oharastream.ohara.client.configurator.{ConnectorApi, WorkerApi}
import oharastream.ohara.testing.WithBrokerWorker
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

class TestDefaultValuesAutoComplete extends WithBrokerWorker {
  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(20, TimeUnit.SECONDS))

  private[this] val configurator =
    Configurator.builder.fake(testUtil().brokersConnProps(), testUtil().workersConnProps()).build()

  private[this] val workerClusterInfo = result(
    WorkerApi.access.hostname(configurator.hostname).port(configurator.port).list()
  ).head

  private[this] val connectorApi = ConnectorApi.access.hostname(configurator.hostname).port(configurator.port)

  @Test
  def testDefaultValuesForPerfSource(): Unit = {
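    // create the connector without any perf-specific settings; the defaults should be auto-completed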
    val connector = result(
      connectorApi.request
        .workerClusterKey(workerClusterInfo.key)
        .className("oharastream.ohara.connector.perf.PerfSource")
        .create()
    )
    connector.settings.keySet should contain("perf.batch")
    connector.settings.keySet should contain("perf.frequency")
    connector.settings.keySet should contain("perf.cell.length")
  }

  @Test
  def testDefaultValuesForConsoleSink(): Unit = {
    val connector = result(
      connectorApi.request
        .workerClusterKey(workerClusterInfo.key)
        .className("oharastream.ohara.connector.console.ConsoleSink")
        .create()
    )
    connector.settings.keySet should contain("console.sink.frequence")
    connector.settings.keySet should contain("console.sink.row.divider")

    val a = new ConcurrentHashMap[String, String]()
    import scala.jdk.CollectionConverters._
    a.elements().asScala.toSeq
  }
} 
Example 36
Source File: TestFakeConnectorAdmin.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator
import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.ConnectorApi.State
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.{ConnectorKey, TopicKey}
import oharastream.ohara.common.util.CommonUtils
import oharastream.ohara.configurator.fake.FakeConnectorAdmin
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
class TestFakeConnectorAdmin extends OharaTest {
  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(20, TimeUnit.SECONDS))
  @Test
  def testControlConnector(): Unit = {
    val connectorKey = ConnectorKey.of(CommonUtils.randomString(5), CommonUtils.randomString(5))
    val topicKey     = TopicKey.of(CommonUtils.randomString(5), CommonUtils.randomString(5))
    val className    = CommonUtils.randomString(10)
    val fake         = new FakeConnectorAdmin()
    result(
      fake
        .connectorCreator()
        .connectorKey(connectorKey)
        .topicKey(topicKey)
        .numberOfTasks(1)
        .className(className)
        .create()
    )

    result(fake.exist(connectorKey)) shouldBe true

    result(fake.status(connectorKey)).connector.state shouldBe State.RUNNING.name

    result(fake.pause(connectorKey))
    result(fake.status(connectorKey)).connector.state shouldBe State.PAUSED.name

    result(fake.resume(connectorKey))
    result(fake.status(connectorKey)).connector.state shouldBe State.RUNNING.name

    result(fake.delete(connectorKey))
    result(fake.exist(connectorKey)) shouldBe false
  }
} 
Example 37
Source File: TestOfficialConnectorsDefinition.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator

import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.WithDefinitions
import oharastream.ohara.shabondi.{ShabondiSink, ShabondiSource}
import org.junit.Test
import org.scalatest.matchers.should.Matchers._


class TestOfficialConnectorsDefinition extends OharaTest {
  @Test
  def testLocalConnectors(): Unit = {
    ReflectionUtils.localConnectorDefinitions.size should not be 0
    ReflectionUtils.localConnectorDefinitions.map(_.className) should contain(
      classOf[oharastream.ohara.connector.ftp.FtpSource].getName
    )
    ReflectionUtils.localConnectorDefinitions.map(_.className) should contain(
      classOf[oharastream.ohara.connector.smb.SmbSource].getName
    )
    ReflectionUtils.localConnectorDefinitions.map(_.className) should contain(
      classOf[oharastream.ohara.connector.perf.PerfSource].getName
    )
    ReflectionUtils.localConnectorDefinitions.map(_.className) should contain(
      classOf[oharastream.ohara.connector.jdbc.source.JDBCSourceConnector].getName
    )
    ReflectionUtils.localConnectorDefinitions.map(_.className) should contain(
      classOf[oharastream.ohara.connector.hdfs.sink.HDFSSink].getName
    )
    ReflectionUtils.localConnectorDefinitions.map(_.className) should not contain classOf[ShabondiSink].getName
    ReflectionUtils.localConnectorDefinitions.map(_.className) should not contain classOf[ShabondiSource].getName
  }

  @Test
  def testOrderInGroup(): Unit = {
    val illegalConnectors =
      ReflectionUtils.localConnectorDefinitions.filter(_.settingDefinitions.exists(_.orderInGroup() < 0))
    if (illegalConnectors.nonEmpty)
      throw new AssertionError(
        illegalConnectors
          .map(
            d =>
              s"the following definitions in ${d.className} have illegal orderInGroup. ${d.settingDefinitions
                .map(d => s"${d.key()} has orderInGroup:${d.orderInGroup()}")
                .mkString(",")}"
          )
          .mkString(",")
      )
  }

  private[this] def localConnectorDefinitions =
    ReflectionUtils.localConnectorDefinitions.filter(_.className != classOf[FallibleSink].getName)

  @Test
  def testVersion(): Unit = {
    val illegalConnectors = localConnectorDefinitions
      .map(c => c.className -> c.settingDefinitions.find(_.key == WithDefinitions.VERSION_KEY).get.defaultString())
      .toMap
      .filter(_._2 == "unknown")
    if (illegalConnectors.nonEmpty)
      throw new AssertionError(
        illegalConnectors
          .map {
            case (className, version) => s"$className has illegal version:$version"
          }
          .mkString(",")
      )
  }

  @Test
  def testRevision(): Unit = {
    val illegalConnectors = localConnectorDefinitions
      .map(c => c.className -> c.settingDefinitions.find(_.key == WithDefinitions.REVISION_KEY).get.defaultString())
      .toMap
      .filter(_._2 == "unknown")
    if (illegalConnectors.nonEmpty)
      throw new AssertionError(
        illegalConnectors
          .map {
            case (className, version) => s"$className has illegal revision:$version"
          }
          .mkString(",")
      )
  }

  @Test
  def testAuthor(): Unit = {
    val illegalConnectors = localConnectorDefinitions
      .map(c => c.className -> c.settingDefinitions.find(_.key == WithDefinitions.AUTHOR_KEY).get.defaultString())
      .toMap
      .filter(_._2 == "unknown")
    if (illegalConnectors.nonEmpty)
      throw new AssertionError(
        illegalConnectors
          .map {
            case (className, version) => s"$className has illegal author:$version"
          }
          .mkString(",")
      )
  }
} 
Example 38
Source File: TestInspectWorkerCluster.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.{InspectApi, WorkerApi}
import oharastream.ohara.common.util.Releasable
import oharastream.ohara.testing.WithBrokerWorker
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

class TestInspectWorkerCluster extends WithBrokerWorker {
  private[this] val configurator =
    Configurator.builder.fake(testUtil().brokersConnProps(), testUtil().workersConnProps()).build()

  private[this] val workerClusterInfo = result(
    WorkerApi.access.hostname(configurator.hostname).port(configurator.port).list()
  ).head
  private[this] def inspectApi = InspectApi.access.hostname(configurator.hostname).port(configurator.port)

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(20, TimeUnit.SECONDS))

  @Test
  def inspectWithoutKey(): Unit = {
    val info = result(inspectApi.workerInfo())
    info.imageName shouldBe WorkerApi.IMAGE_NAME_DEFAULT
    info.settingDefinitions.size shouldBe WorkerApi.DEFINITIONS.size
    info.settingDefinitions.foreach { definition =>
      definition shouldBe WorkerApi.DEFINITIONS.find(_.key() == definition.key()).get
    }
  }

  @Test
  def inspectWithKey(): Unit = {
    val info = result(inspectApi.workerInfo(workerClusterInfo.key))
    info.imageName shouldBe WorkerApi.IMAGE_NAME_DEFAULT
    info.settingDefinitions.size shouldBe WorkerApi.DEFINITIONS.size
    info.settingDefinitions.foreach { definition =>
      definition shouldBe WorkerApi.DEFINITIONS.find(_.key() == definition.key()).get
    }
  }

  @After
  def tearDown(): Unit = Releasable.close(configurator)
} 
Example 39
Source File: TestResponseFromUnsupportedApis.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator

import java.util.concurrent.TimeUnit

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import akka.http.scaladsl.model.{HttpMethod, HttpMethods, HttpRequest}
import akka.http.scaladsl.unmarshalling.Unmarshal
import oharastream.ohara.client.configurator.ErrorApi
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

class TestResponseFromUnsupportedApis extends OharaTest {
  private[this] val configurator = Configurator.builder.fake().build()

  private[this] implicit val actorSystem: ActorSystem = ActorSystem("Executor-TestResponseFromUnsupportedApis")

  private[this] val expectedMessage = oharastream.ohara.configurator.route.apiUrl

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(20, TimeUnit.SECONDS))

  @Test
  def testGet(): Unit = sendRequest(HttpMethods.GET, CommonUtils.randomString()).apiUrl.get shouldBe expectedMessage

  @Test
  def testPut(): Unit = sendRequest(HttpMethods.PUT, CommonUtils.randomString()).apiUrl.get shouldBe expectedMessage

  @Test
  def testDelete(): Unit =
    sendRequest(HttpMethods.DELETE, CommonUtils.randomString()).apiUrl.get shouldBe expectedMessage

  @Test
  def testPost(): Unit = sendRequest(HttpMethods.POST, CommonUtils.randomString()).apiUrl.get shouldBe expectedMessage

  private[this] def sendRequest(method: HttpMethod, postfix: String): ErrorApi.Error =
    result(
      Http()
        .singleRequest(HttpRequest(method, s"http://${configurator.hostname}:${configurator.port}/$postfix"))
        .flatMap { response =>
          if (response.status.isSuccess()) Future.failed(new AssertionError())
          else Unmarshal(response.entity).to[ErrorApi.Error]
        }
    )

  @After
  def tearDown(): Unit = {
    Releasable.close(configurator)
    result(actorSystem.terminate())
  }
} 
Example 40
Source File: TestListManyPipelines.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.{BrokerApi, ConnectorApi, PipelineApi, TopicApi, WorkerApi}
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.testing.WithBrokerWorker
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
class TestListManyPipelines extends WithBrokerWorker {
  private[this] val configurator =
    Configurator.builder.fake(testUtil().brokersConnProps(), testUtil().workersConnProps()).build()

  private[this] val workerClusterInfo = result(
    WorkerApi.access.hostname(configurator.hostname).port(configurator.port).list()
  ).head

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(20, TimeUnit.SECONDS))

  private[this] val numberOfPipelines = 30
  @Test
  def test(): Unit = {
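    // build one topic and one connector, wire them into 30 pipelines, and verify list() returns them all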
    val topic = result(
      TopicApi.access
        .hostname(configurator.hostname)
        .port(configurator.port)
        .request
        .name(CommonUtils.randomString(10))
        .brokerClusterKey(
          result(BrokerApi.access.hostname(configurator.hostname).port(configurator.port).list()).head.key
        )
        .create()
    )

    val connector = result(
      ConnectorApi.access
        .hostname(configurator.hostname)
        .port(configurator.port)
        .request
        .name(CommonUtils.randomString(10))
        .className("oharastream.ohara.connector.perf.PerfSource")
        .topicKey(topic.key)
        .numberOfTasks(1)
        .workerClusterKey(workerClusterInfo.key)
        .create()
    )

    val pipelines = (0 until numberOfPipelines).map { _ =>
      result(
        PipelineApi.access
          .hostname(configurator.hostname)
          .port(configurator.port)
          .request
          .name(CommonUtils.randomString(10))
          .endpoint(connector)
          .endpoint(topic)
          .create()
      )
    }

    val listPipeline =
      Await.result(
        PipelineApi.access.hostname(configurator.hostname).port(configurator.port).list(),
        Duration(20, TimeUnit.SECONDS)
      )
    pipelines.size shouldBe listPipeline.size
    pipelines.foreach(p => listPipeline.exists(_.name == p.name) shouldBe true)
  }

  @After
  def tearDown(): Unit = Releasable.close(configurator)
} 
Example 41
Source File: TestConcurrentAccess.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator

import java.util.concurrent.atomic.AtomicInteger
import java.util.concurrent.{Executors, TimeUnit}

import oharastream.ohara.client.configurator.NodeApi
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.duration.Duration
import scala.concurrent.{Await, ExecutionContext, ExecutionContextExecutor, Future}

class TestConcurrentAccess extends OharaTest {
  private[this] val configurator = Configurator.builder.fake().build()

  private[this] val nodeApi = NodeApi.access.hostname(configurator.hostname).port(configurator.port)

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(10, TimeUnit.SECONDS))

  
  @Test
  def deletedObjectShouldDisappearFromGet(): Unit = {
    val threadCount                                         = 10
    val threadsPool                                         = Executors.newFixedThreadPool(threadCount)
    val unmatchedCount                                      = new AtomicInteger()
    implicit val executionContext: ExecutionContextExecutor = ExecutionContext.fromExecutor(threadsPool)
    (0 until threadCount).foreach { _ =>
      threadsPool.execute { () =>
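        // each thread creates a node, deletes it, and then checks the listing no longer contains it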
        val nodeName = CommonUtils.randomString(10)
        val nodes = result(
          nodeApi.request
            .nodeName(nodeName)
            .user(CommonUtils.randomString(10))
            .password(CommonUtils.randomString(10))
            .create()
            .flatMap(node => nodeApi.delete(node.key))
            .flatMap(_ => nodeApi.list())
        )
        if (nodes.exists(_.hostname == nodeName)) unmatchedCount.incrementAndGet()
      }
    }
    threadsPool.shutdown()
    threadsPool.awaitTermination(60, TimeUnit.SECONDS) shouldBe true
    unmatchedCount.get() shouldBe 0
  }

  @After
  def tearDown(): Unit = Releasable.close(configurator)
} 
Example 42
Source File: TestRouteUtils.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator.route

import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.SettingDef
import oharastream.ohara.common.setting.SettingDef.Permission
import org.junit.Test
import spray.json.JsString
import org.scalatest.matchers.should.Matchers._

class TestRouteUtils extends OharaTest {
  @Test
  def testUpdatable(): Unit = {
    val settings = Map("a" -> JsString("b"))
    val settingDef = SettingDef
      .builder()
      .key("a")
      .permission(Permission.CREATE_ONLY)
      .build()
    keepEditableFields(settings, Seq(settingDef)) shouldBe Map.empty
  }
} 
Example 43
Source File: TestNodeNameUpperCaseRoute.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator.route

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.NodeApi
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.configurator.Configurator
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

class TestNodeNameUpperCaseRoute extends OharaTest {
  private[this] val numberOfCluster = 1
  private[this] val configurator =
    Configurator.builder.fake(numberOfCluster, numberOfCluster, "zookeepercluster").build()
  private[this] val nodeApi                    = NodeApi.access.hostname(configurator.hostname).port(configurator.port)
  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(20, TimeUnit.SECONDS))

  @Test
  def testAddNodeNameLowerCase(): Unit = {
    val name = CommonUtils.randomString(10).toLowerCase
    result(nodeApi.request.nodeName(name).port(22).user("b").password("c").create()).name shouldBe name
  }

  @Test
  def testAddNodeNameUpperCase1(): Unit = {
    val name = CommonUtils.randomString(10).toUpperCase
    result(nodeApi.request.nodeName(name).port(22).user("b").password("c").create())
  }

  @Test
  def testAddNodeNameUpperCase2(): Unit = {
    val name = "HOST1.test"
    result(nodeApi.request.nodeName(name).port(22).user("b").password("c").create())
  }

  @Test
  def testAddNodeNameUpperCase3(): Unit = {
    val name = "aaa-Node1.test"
    result(nodeApi.request.nodeName(name).port(22).user("b").password("c").create())
  }

  @After
  def tearDown(): Unit = Releasable.close(configurator)
} 
Example 44
Source File: TestValidationRoute.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator.route

import oharastream.ohara.client.configurator.{ValidationApi, WorkerApi}
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.TopicKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.configurator.{Configurator, FallibleSink}
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
class TestValidationRoute extends OharaTest {
  private[this] val configurator = Configurator.builder.fake().build()

  private[this] val wkCluster = result(
    WorkerApi.access.hostname(configurator.hostname).port(configurator.port).list()
  ).head

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration("20 seconds"))
  @Test
  def validateConnector(): Unit = {
    val className = classOf[FallibleSink].getName
    val response = result(
      ValidationApi.access
        .hostname(configurator.hostname)
        .port(configurator.port)
        .connectorRequest
        .name(CommonUtils.randomString(10))
        .className(className)
        .topicKey(TopicKey.of(CommonUtils.randomString(5), CommonUtils.randomString(5)))
        .workerClusterKey(wkCluster.key)
        .verify()
    )
    response.className.get() shouldBe className
  }

  @After
  def tearDown(): Unit = Releasable.close(configurator)
} 
Example 45
Source File: TestPrivateRoute.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator.route

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.PrivateApi.Deletion
import oharastream.ohara.client.configurator.{PrivateApi, WorkerApi}
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.configurator.Configurator
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

class TestPrivateRoute extends OharaTest {
  private[this] val workerCount  = 2
  private[this] val configurator = Configurator.builder.fake(1, workerCount).build()

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(20, TimeUnit.SECONDS))

  private[this] val workerApi = WorkerApi.access.hostname(configurator.hostname).port(configurator.port)

  @Test
  def testDeletion(): Unit = {
    val workers = result(workerApi.list())
    val group   = workers.head.group
    val kind    = workers.head.kind
    workers.size shouldBe workerCount
    result(
      workerApi.request
        .group(group)
        .nodeNames(workers.head.nodeNames)
        .brokerClusterKey(workers.head.brokerClusterKey)
        .create()
    )

    result(workerApi.list()).size shouldBe workers.size + 1

    // we use the same group to create a new worker cluster
    result(workerApi.list()).groupBy(_.group).size shouldBe workerCount

    result(
      PrivateApi.delete(
        hostname = configurator.hostname,
        port = configurator.port,
        deletion = Deletion(groups = Set(group), kinds = Set(kind))
      )
    )

    val latestWorkers = result(workerApi.list())
    latestWorkers.size shouldBe workers.size - 1

    // delete again
    result(
      PrivateApi.delete(
        hostname = configurator.hostname,
        port = configurator.port,
        deletion = Deletion(groups = Set(group), kinds = Set(kind))
      )
    )
    result(workerApi.list()).size shouldBe latestWorkers.size

    // delete group without kind
    result(
      PrivateApi.delete(
        hostname = configurator.hostname,
        port = configurator.port,
        deletion = Deletion(groups = Set(latestWorkers.head.group), kinds = Set.empty)
      )
    )
    result(workerApi.list()).size shouldBe latestWorkers.size
  }
} 
Example 46
Source File: TestContainerRoute.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator.route

import oharastream.ohara.client.configurator._
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.ObjectKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.configurator.Configurator
import org.junit.{After, Before, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Await, Future}
import scala.concurrent.duration.Duration

class TestContainerRoute extends OharaTest {
  private[this] val configurator = Configurator.builder.fake(0, 0).build()
  private[this] val containerApi = ContainerApi.access.hostname(configurator.hostname).port(configurator.port)
  private[this] val brokerApi    = BrokerApi.access.hostname(configurator.hostname).port(configurator.port)
  private[this] val workerApi    = WorkerApi.access.hostname(configurator.hostname).port(configurator.port)

  private[this] val zkClusterKey = ObjectKey.of("default", CommonUtils.randomString(10))
  private[this] val bkClusterKey = ObjectKey.of("default", CommonUtils.randomString(10))
  private[this] val wkClusterKey = ObjectKey.of("default", CommonUtils.randomString(10))

  private[this] val nodeNames: Set[String] = Set("n0", "n1")

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration("20 seconds"))
  @Before
  def setup(): Unit = {
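    // register the fake nodes and start zookeeper -> broker -> worker so their containers can be queried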
    val nodeApi = NodeApi.access.hostname(configurator.hostname).port(configurator.port)

    nodeNames.isEmpty shouldBe false
    nodeNames.foreach { n =>
      result(nodeApi.request.nodeName(n).port(22).user("user").password("pwd").create())
    }

    val zk = result(
      ZookeeperApi.access
        .hostname(configurator.hostname)
        .port(configurator.port)
        .request
        .key(zkClusterKey)
        .nodeNames(nodeNames)
        .create()
    )
    zk.key shouldBe zkClusterKey
    result(ZookeeperApi.access.hostname(configurator.hostname).port(configurator.port).start(zk.key))

    val bk = result(brokerApi.request.key(bkClusterKey).zookeeperClusterKey(zkClusterKey).nodeNames(nodeNames).create())
    result(brokerApi.start(bk.key))

    val wk = result(workerApi.request.key(wkClusterKey).brokerClusterKey(bkClusterKey).nodeNames(nodeNames).create())
    result(workerApi.start(wk.key))
  }

  @Test
  def testGetContainersOfBrokerCluster(): Unit = {
    val containerGroups = result(containerApi.get(bkClusterKey))
    containerGroups.size should not be 0
    containerGroups.foreach(group => {
      group.clusterKey shouldBe bkClusterKey
      group.clusterType shouldBe "broker"
      group.containers.size should not be 0
    })
  }

  @Test
  def testGetContainersOfWorkerCluster(): Unit = {
    val containerGroups = result(containerApi.get(wkClusterKey))
    containerGroups.size should not be 0
    containerGroups.foreach(group => {
      group.clusterKey shouldBe wkClusterKey
      group.clusterType shouldBe "worker"
      group.containers.size should not be 0
    })
  }

  @After
  def tearDown(): Unit = Releasable.close(configurator)
} 
Example 47
Source File: TestClusterNameUpperCaseRoute.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator.route

import oharastream.ohara.client.configurator.{NodeApi, ZookeeperApi}
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.configurator.Configurator
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._
import spray.json.DeserializationException

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

class TestClusterNameUpperCaseRoute extends OharaTest {
  private[this] val numberOfCluster = 1
  private[this] val configurator =
    Configurator.builder.fake(numberOfCluster, numberOfCluster, "zk").build()
  private[this] val nodeApi      = NodeApi.access.hostname(configurator.hostname).port(configurator.port)
  private[this] val zookeeperApi = ZookeeperApi.access.hostname(configurator.hostname).port(configurator.port)

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration("20 seconds"))
  @Test
  def testAddZookeeper(): Unit = {
    result(nodeApi.request.nodeName("host1").port(22).user("b").password("c").create())

    an[DeserializationException] should be thrownBy result(
      zookeeperApi.request.name(s"ZK-${CommonUtils.randomString(10)}").nodeName("host1").create()
    )
  }

  @After
  def tearDown(): Unit = Releasable.close(configurator)
} 
Example 48
Source File: TestObjectRoute.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator.route

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.ObjectApi
import oharastream.ohara.client.configurator.ObjectApi.ObjectInfo
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.ObjectKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.configurator.Configurator
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._
import spray.json.JsString

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
class TestObjectRoute extends OharaTest {
  private[this] val configurator = Configurator.builder.fake(1, 1).build()

  private[this] val objectApi = ObjectApi.access.hostname(configurator.hostname).port(configurator.port)

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(20, TimeUnit.SECONDS))

  private[this] def create(): ObjectInfo = {
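    // create an object carrying two random settings and verify the stored info echoes them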
    val key = ObjectKey.of("g", "n")
    val settings = Map(
      CommonUtils.randomString() -> JsString(CommonUtils.randomString()),
      CommonUtils.randomString() -> JsString(CommonUtils.randomString())
    )
    val objectInfo = result(objectApi.request.key(key).settings(settings).create())
    objectInfo.key shouldBe key
    settings.foreach {
      case (k, v) => objectInfo.settings(k) shouldBe v
    }
    objectInfo
  }

  @Test
  def testCreate(): Unit = create()

  @Test
  def testGet(): Unit = {
    val objectInfo = create()
    objectInfo shouldBe result(objectApi.get(objectInfo.key))
  }

  @Test
  def testGetNothing(): Unit =
    an[IllegalArgumentException] should be thrownBy result(objectApi.get(ObjectKey.of(CommonUtils.randomString(), "n")))

  @Test
  def testList(): Unit = {
    val objectInfo = create()
    objectInfo shouldBe result(objectApi.list()).head
  }

  @Test
  def testDelete(): Unit = {
    val objectInfo = create()
    result(objectApi.delete(objectInfo.key))
    result(objectApi.list()) shouldBe Seq.empty
  }

  @Test
  def testUpdate(): Unit = {
    val objectInfo = create()
    val settings = Map(
      CommonUtils.randomString() -> JsString(CommonUtils.randomString()),
      CommonUtils.randomString() -> JsString(CommonUtils.randomString())
    )
    val updated = result(objectApi.request.key(objectInfo.key).settings(settings).update())
    settings.foreach {
      case (k, v) => updated.settings(k) shouldBe v
    }
    objectInfo.settings.foreach {
      case (k, v) =>
        if (k == "lastModified") updated.settings(k) should not be v
        else updated.settings(k) shouldBe v
    }
  }

  @After
  def tearDown(): Unit = Releasable.close(configurator)
} 
Example 49
Source File: TestDataStoreBuilder.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator.store

import oharastream.ohara.common.rule.OharaTest
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

class TestDataStoreBuilder extends OharaTest {
  @Test
  def testNullValueSerializer(): Unit =
    an[NullPointerException] should be thrownBy DataStore.builder.dataSerializer(null)

  @Test
  def testNullFolder(): Unit =
    an[NullPointerException] should be thrownBy DataStore.builder.persistentFolder(null)

  @Test
  def testEmptyFolder(): Unit =
    an[IllegalArgumentException] should be thrownBy DataStore.builder.persistentFolder("")

  @Test
  def allDefault(): Unit = DataStore.builder.build().close()
} 
Example 50
Source File: TestMetricsCache.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator.store

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.MetricsApi.{Meter, Metrics}
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.ObjectKey
import oharastream.ohara.common.util.CommonUtils
import oharastream.ohara.configurator.store.MetricsCache.RequestKey
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.duration.Duration

class TestMetricsCache extends OharaTest {
  @Test
  def testRequestKey(): Unit = {
    val key = RequestKey(
      key = ObjectKey.of(CommonUtils.randomString(), CommonUtils.randomString()),
      service = CommonUtils.randomString()
    )

    key shouldBe key
    key should not be key.copy(key = ObjectKey.of(CommonUtils.randomString(), CommonUtils.randomString()))
    key should not be key.copy(service = CommonUtils.randomString())
  }

  @Test
  def nullRefresher(): Unit =
    an[NullPointerException] should be thrownBy MetricsCache.builder.refresher(null)

  @Test
  def nullFrequency(): Unit =
    an[NullPointerException] should be thrownBy MetricsCache.builder.frequency(null)

  @Test
  def testRefresh(): Unit = {
    val data = Map(
      ObjectKey.of("a", "b") -> Metrics(
        Seq(
          Meter(
            name = "name",
            value = 1.1,
            unit = "unit",
            document = "document",
            queryTime = CommonUtils.current(),
            startTime = Some(CommonUtils.current()),
            lastModified = Some(CommonUtils.current()),
            valueInPerSec = None
          )
        )
      )
    )
    val clusterInfo = FakeClusterInfo(CommonUtils.randomString())
    val cache = MetricsCache.builder
      .refresher(() => Map(clusterInfo -> Map(CommonUtils.hostname() -> data)))
      .frequency(Duration(2, TimeUnit.SECONDS))
      .build
    try {
      cache.meters(clusterInfo) shouldBe Map.empty
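      // the refresher runs every 2 seconds, so sleep past one cycle before the data shows up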
      TimeUnit.SECONDS.sleep(3)
      cache.meters(clusterInfo)(CommonUtils.hostname()) shouldBe data
    } finally cache.close()
  }

  @Test
  def failToOperateAfterClose(): Unit = {
    val cache = MetricsCache.builder.refresher(() => Map.empty).frequency(Duration(2, TimeUnit.SECONDS)).build
    cache.close()

    an[IllegalStateException] should be thrownBy cache.meters(FakeClusterInfo(CommonUtils.randomString()))
  }
} 
Example 51
Source File: TestFtpSourceDefinitions.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.connector.ftp

import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.SettingDef
import oharastream.ohara.common.setting.SettingDef.{Necessary, Permission, Reference}
import org.junit.Test
import org.scalatest.matchers.should.Matchers._
class TestFtpSourceDefinitions extends OharaTest {
  private[this] val ftpSource = new FtpSource
  @Test
  def checkInputFolder(): Unit = {
    val definition = ftpSource.settingDefinitions().get(INPUT_FOLDER_KEY)
    definition.necessary() shouldBe Necessary.REQUIRED
    definition.hasDefault shouldBe false
    definition.permission() shouldBe Permission.EDITABLE
    definition.internal() shouldBe false
    definition.reference() shouldBe Reference.NONE
    definition.valueType() shouldBe SettingDef.Type.STRING
  }

  @Test
  def checkCompletedFolder(): Unit = {
    val definition = ftpSource.settingDefinitions().get(COMPLETED_FOLDER_KEY)
    definition.necessary() should not be Necessary.REQUIRED
    definition.hasDefault shouldBe false
    definition.permission() shouldBe Permission.EDITABLE
    definition.internal() shouldBe false
    definition.reference() shouldBe Reference.NONE
    definition.valueType() shouldBe SettingDef.Type.STRING
  }

  @Test
  def checkErrorFolder(): Unit = {
    val definition = ftpSource.settingDefinitions().get(ERROR_FOLDER_KEY)
    definition.necessary() shouldBe Necessary.OPTIONAL
    definition.hasDefault shouldBe false
    definition.permission() shouldBe Permission.EDITABLE
    definition.internal() shouldBe false
    definition.reference() shouldBe Reference.NONE
    definition.valueType() shouldBe SettingDef.Type.STRING
  }

  @Test
  def checkEncode(): Unit = {
    val definition = ftpSource.settingDefinitions().get(FILE_ENCODE_KEY)
    definition.necessary() should not be Necessary.REQUIRED
    definition.defaultString() shouldBe "UTF-8"
    definition.permission() shouldBe Permission.EDITABLE
    definition.internal() shouldBe false
    definition.reference() shouldBe Reference.NONE
    definition.valueType() shouldBe SettingDef.Type.STRING
  }

  @Test
  def checkHostname(): Unit = {
    val definition = ftpSource.settingDefinitions().get(FTP_HOSTNAME_KEY)
    definition.necessary() shouldBe Necessary.REQUIRED
    definition.hasDefault shouldBe false
    definition.permission() shouldBe Permission.EDITABLE
    definition.internal() shouldBe false
    definition.reference() shouldBe Reference.NONE
    definition.valueType() shouldBe SettingDef.Type.STRING
  }

  @Test
  def checkPort(): Unit = {
    val definition = ftpSource.settingDefinitions().get(FTP_PORT_KEY)
    definition.necessary() shouldBe Necessary.REQUIRED
    definition.hasDefault shouldBe false
    definition.permission() shouldBe Permission.EDITABLE
    definition.internal() shouldBe false
    definition.reference() shouldBe Reference.NONE
    definition.valueType() shouldBe SettingDef.Type.REMOTE_PORT
  }

  @Test
  def checkUser(): Unit = {
    val definition = ftpSource.settingDefinitions().get(FTP_USER_NAME_KEY)
    definition.necessary() shouldBe Necessary.REQUIRED
    definition.hasDefault shouldBe false
    definition.permission() shouldBe Permission.EDITABLE
    definition.internal() shouldBe false
    definition.reference() shouldBe Reference.NONE
    definition.valueType() shouldBe SettingDef.Type.STRING
  }

  @Test
  def checkPassword(): Unit = {
    val definition = ftpSource.settingDefinitions().get(FTP_PASSWORD_KEY)
    definition.necessary() shouldBe Necessary.REQUIRED
    definition.hasDefault shouldBe false
    definition.permission() shouldBe Permission.EDITABLE
    definition.internal() shouldBe false
    definition.reference() shouldBe Reference.NONE
    definition.valueType() shouldBe SettingDef.Type.PASSWORD
  }
} 
Example 52
Source File: TestFtpSourceProps.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.connector.ftp

import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.CommonUtils
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

class TestFtpSourceProps extends OharaTest {
  @Test
  def testGetter(): Unit = {
    val hostname = CommonUtils.randomString()
    val port     = 12345
    val user     = CommonUtils.randomString()
    val password = CommonUtils.randomString()
    val props = FtpSourceProps(
      hostname = hostname,
      port = port,
      user = user,
      password = password
    ).toMap
    props(FTP_HOSTNAME_KEY) shouldBe hostname
    props(FTP_PORT_KEY).toInt shouldBe port
    props(FTP_USER_NAME_KEY) shouldBe user
    props(FTP_PASSWORD_KEY) shouldBe password
  }
} 
Example 53
Source File: TestFtpSinkDefinitions.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.connector.ftp

import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.SettingDef
import oharastream.ohara.common.setting.SettingDef.{Necessary, Permission, Reference}
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

class TestFtpSinkDefinitions extends OharaTest {
  private[this] val ftpSink = new FtpSink
  @Test
  def checkOutputFolder(): Unit = {
    val definition = ftpSink.settingDefinitions().get(OUTPUT_FOLDER_KEY)
    definition.necessary() shouldBe Necessary.REQUIRED
    definition.hasDefault shouldBe false
    definition.permission() shouldBe Permission.EDITABLE
    definition.internal() shouldBe false
    definition.reference() shouldBe Reference.NONE
    definition.valueType() shouldBe SettingDef.Type.STRING
  }

  @Test
  def checkNeedHeader(): Unit = {
    val definition = ftpSink.settingDefinitions().get(FILE_NEED_HEADER_KEY)
    definition.necessary() should not be Necessary.REQUIRED
    definition.defaultBoolean() shouldBe true
    definition.permission() shouldBe Permission.EDITABLE
    definition.internal() shouldBe false
    definition.reference() shouldBe Reference.NONE
    definition.valueType() shouldBe SettingDef.Type.BOOLEAN
  }

  @Test
  def checkEncode(): Unit = {
    val definition = ftpSink.settingDefinitions().get(FILE_ENCODE_KEY)
    definition.necessary() should not be Necessary.REQUIRED
    definition.defaultString() shouldBe "UTF-8"
    definition.permission() shouldBe Permission.EDITABLE
    definition.internal() shouldBe false
    definition.reference() shouldBe Reference.NONE
    definition.valueType() shouldBe SettingDef.Type.STRING
  }

  @Test
  def checkHostname(): Unit = {
    val definition = ftpSink.settingDefinitions().get(FTP_HOSTNAME_KEY)
    definition.necessary() shouldBe Necessary.REQUIRED
    definition.hasDefault shouldBe false
    definition.permission() shouldBe Permission.EDITABLE
    definition.internal() shouldBe false
    definition.reference() shouldBe Reference.NONE
    definition.valueType() shouldBe SettingDef.Type.STRING
  }

  @Test
  def checkPort(): Unit = {
    val definition = ftpSink.settingDefinitions().get(FTP_PORT_KEY)
    definition.necessary() shouldBe Necessary.REQUIRED
    definition.hasDefault shouldBe false
    definition.permission() shouldBe Permission.EDITABLE
    definition.internal() shouldBe false
    definition.reference() shouldBe Reference.NONE
    definition.valueType() shouldBe SettingDef.Type.REMOTE_PORT
  }

  @Test
  def checkUser(): Unit = {
    val definition = ftpSink.settingDefinitions().get(FTP_USER_NAME_KEY)
    definition.necessary() shouldBe Necessary.REQUIRED
    definition.hasDefault shouldBe false
    definition.permission() shouldBe Permission.EDITABLE
    definition.internal() shouldBe false
    definition.reference() shouldBe Reference.NONE
    definition.valueType() shouldBe SettingDef.Type.STRING
  }

  @Test
  def checkPassword(): Unit = {
    val definition = ftpSink.settingDefinitions().get(FTP_PASSWORD_KEY)
    definition.necessary() shouldBe Necessary.REQUIRED
    definition.hasDefault shouldBe false
    definition.permission() shouldBe Permission.EDITABLE
    definition.internal() shouldBe false
    definition.reference() shouldBe Reference.NONE
    definition.valueType() shouldBe SettingDef.Type.PASSWORD
  }
} 
Example 54
Source File: TestFtpSinkProps.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.connector.ftp
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.CommonUtils
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

class TestFtpSinkProps extends OharaTest {
  @Test
  def testGetter(): Unit = {
    val hostname = CommonUtils.randomString()
    val port     = 12345
    val user     = CommonUtils.randomString()
    val password = CommonUtils.randomString()
    val props = FtpSinkProps(
      user = user,
      password = password,
      hostname = hostname,
      port = port
    ).toMap

    props(FTP_HOSTNAME_KEY) shouldBe hostname
    props(FTP_PORT_KEY).toInt shouldBe port
    props(FTP_USER_NAME_KEY) shouldBe user
    props(FTP_PASSWORD_KEY) shouldBe password
  }
} 
Example 55
Source File: TestPerfSourceMetrics.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.connector.perf

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.kafka.ConnectorAdmin
import oharastream.ohara.common.setting.{ConnectorKey, TopicKey}
import oharastream.ohara.common.util.CommonUtils
import oharastream.ohara.metrics.BeanChannel
import oharastream.ohara.testing.WithBrokerWorker
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

import scala.jdk.CollectionConverters._
import scala.concurrent.Await
import scala.concurrent.duration.Duration

class TestPerfSourceMetrics extends WithBrokerWorker {
  private[this] val connectorAdmin = ConnectorAdmin(testUtil.workersConnProps)

  private[this] val props = PerfSourceProps(
    batch = 5,
    freq = Duration(5, TimeUnit.SECONDS),
    cellSize = 10
  )

  @Test
  def test(): Unit = {
    val topicKey     = TopicKey.of(CommonUtils.randomString(5), CommonUtils.randomString(5))
    val connectorKey = ConnectorKey.of(CommonUtils.randomString(5), CommonUtils.randomString(5))
    Await.result(
      connectorAdmin
        .connectorCreator()
        .topicKey(topicKey)
        .connectorClass(classOf[PerfSource])
        .numberOfTasks(1)
        .connectorKey(connectorKey)
        .settings(props.toMap)
        .create(),
      Duration(20, TimeUnit.SECONDS)
    )
    CommonUtils.await(() => {
      !BeanChannel.local().counterMBeans().isEmpty
    }, java.time.Duration.ofSeconds(30))
    val counters = BeanChannel.local().counterMBeans()
    counters.size should not be 0
    counters.asScala.foreach { counter =>
      counter.getStartTime should not be 0
      CommonUtils.requireNonEmpty(counter.getUnit)
      CommonUtils.requireNonEmpty(counter.getDocument)
    }
  }
} 
Example 56
Source File: TestPerfDefinition.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.connector.perf

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.kafka.ConnectorAdmin
import oharastream.ohara.common.setting.SettingDef.{Necessary, Permission, Reference}
import oharastream.ohara.common.setting.{ConnectorKey, SettingDef, TopicKey}
import oharastream.ohara.common.util.CommonUtils
import oharastream.ohara.kafka.connector.json.ConnectorDefUtils
import oharastream.ohara.testing.WithBrokerWorker
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

import scala.jdk.CollectionConverters._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
class TestPerfDefinition extends WithBrokerWorker {
  private[this] val perfSource                 = new PerfSource
  private[this] val connectorAdmin             = ConnectorAdmin(testUtil().workersConnProps())
  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(20, TimeUnit.SECONDS))

  @Test
  def checkBatch(): Unit = {
    val definition = perfSource.settingDefinitions().get(PERF_BATCH_KEY)
    definition.necessary() should not be Necessary.REQUIRED
    definition.defaultInt() shouldBe PERF_BATCH_DEFAULT
    definition.permission() shouldBe Permission.EDITABLE
    definition.internal() shouldBe false
    definition.reference() shouldBe Reference.NONE
    definition.valueType() shouldBe SettingDef.Type.INT
  }

  @Test
  def checkFrequence(): Unit = {
    val definition = perfSource.settingDefinitions().get(PERF_FREQUENCY_KEY)
    definition.necessary() should not be Necessary.REQUIRED
    definition.defaultDuration() shouldBe java.time.Duration.ofMillis(PERF_FREQUENCY_DEFAULT.toMillis)
    definition.permission() shouldBe Permission.EDITABLE
    definition.internal() shouldBe false
    definition.reference() shouldBe Reference.NONE
    definition.valueType() shouldBe SettingDef.Type.DURATION
  }

  @Test
  def testSource(): Unit = {
    val topicKey = TopicKey.of(CommonUtils.randomString(5), CommonUtils.randomString(5))
    val response = result(
      connectorAdmin
        .connectorValidator()
        .connectorKey(ConnectorKey.of(CommonUtils.randomString(5), CommonUtils.randomString(5)))
        .numberOfTasks(1)
        .topicKey(topicKey)
        .connectorClass(classOf[PerfSource])
        .run()
    )

    response.settings().size should not be 0
    response
      .settings()
      .asScala
      .filter(_.definition().key() == ConnectorDefUtils.TOPIC_NAMES_DEFINITION.key())
      .head
      .definition()
      .necessary() shouldBe Necessary.REQUIRED
    response
      .settings()
      .asScala
      .filter(_.definition().key() == ConnectorDefUtils.CONNECTOR_CLASS_DEFINITION.key())
      .head
      .definition()
      .necessary() shouldBe Necessary.REQUIRED
    response
      .settings()
      .asScala
      .filter(_.definition().key() == ConnectorDefUtils.NUMBER_OF_TASKS_DEFINITION.key())
      .head
      .definition()
      .necessary() shouldBe Necessary.OPTIONAL
    response
      .settings()
      .asScala
      .filter(_.definition().key() == ConnectorDefUtils.COLUMNS_DEFINITION.key())
      .head
      .definition()
      .necessary() should not be Necessary.REQUIRED
    response
      .settings()
      .asScala
      .filter(_.definition().key() == ConnectorDefUtils.WORKER_CLUSTER_KEY_DEFINITION.key())
      .head
      .definition()
      .necessary() shouldBe Necessary.REQUIRED
    response.errorCount() shouldBe 0
  }
} 
Example 57
Source File: TestConsoleSinkTask.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.connector.console

import java.util.concurrent.TimeUnit

import oharastream.ohara.common.data.Row
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.{ConnectorKey, TopicKey}
import oharastream.ohara.common.util.CommonUtils
import oharastream.ohara.kafka.connector.json.ConnectorDefUtils
import org.apache.kafka.connect.sink.SinkRecord
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.duration.Duration
import scala.jdk.CollectionConverters._

class TestConsoleSinkTask extends OharaTest {
  private[this] val connectorKey = ConnectorKey.of("group", "TestConsoleSinkTask")
  private[this] def configs(key: String, value: String): java.util.Map[String, String] =
    Map(
      ConnectorDefUtils.CONNECTOR_KEY_DEFINITION.key()  -> ConnectorKey.toJsonString(connectorKey),
      ConnectorDefUtils.CONNECTOR_NAME_DEFINITION.key() -> CommonUtils.randomString(),
      key                                               -> value
    ).asJava

  @Test
  def testEmptySetting(): Unit = {
    val task = new ConsoleSinkTask()
    task.start(
      Map(
        ConnectorDefUtils.CONNECTOR_KEY_DEFINITION.key()  -> ConnectorKey.toJsonString(connectorKey),
        ConnectorDefUtils.CONNECTOR_NAME_DEFINITION.key() -> CommonUtils.randomString()
      ).asJava
    )
    task.freq shouldBe CONSOLE_FREQUENCE_DEFAULT
    task.divider shouldBe CONSOLE_ROW_DIVIDER_DEFAULT
  }

  @Test
  def testFrequence(): Unit = {
    val task = new ConsoleSinkTask()
    task.start(configs(CONSOLE_FREQUENCE, "20 seconds"))
    task.freq shouldBe Duration(20, TimeUnit.SECONDS)
  }

  @Test
  def testDivider(): Unit = {
    val task    = new ConsoleSinkTask()
    val divider = CommonUtils.randomString()
    task.start(configs(CONSOLE_ROW_DIVIDER, divider))
    task.divider shouldBe divider
  }

  @Test
  def testPrint(): Unit = {
    val task = new ConsoleSinkTask()
    task.start(configs(CONSOLE_FREQUENCE, "2 seconds"))
    task.lastLog shouldBe -1

    task.put(java.util.List.of())
    task.lastLog shouldBe -1

    putRecord(task)
    val lastLogCopy1 = task.lastLog
    lastLogCopy1 should not be -1

    TimeUnit.SECONDS.sleep(1)

    putRecord(task)
    val lastLogCopy2 = task.lastLog
    lastLogCopy2 shouldBe lastLogCopy1

    TimeUnit.SECONDS.sleep(1)

    putRecord(task)
    val lastLogCopy3 = task.lastLog
    lastLogCopy3 should not be lastLogCopy2
    lastLogCopy3 should not be -1
  }

  private[this] def putRecord(task: ConsoleSinkTask): Unit =
    task.put(
      java.util.List.of(
        new SinkRecord(
          TopicKey.of("g", "n").topicNameOnKafka(),
          1,
          null,
          Row.EMPTY,
          null,
          null,
          1
        )
      )
    )
} 
Example 58
Source File: TestConsoleSink.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.connector.console

import oharastream.ohara.common.rule.OharaTest
import org.junit.Test
import org.scalatest.matchers.should.Matchers._
class TestConsoleSink extends OharaTest {
  @Test
  def testFrequenceDefinitions(): Unit = {
    val sink = new ConsoleSink

    val freqDef = sink.settingDefinitions().get(CONSOLE_FREQUENCE)
    freqDef.documentation() shouldBe CONSOLE_FREQUENCE_DOC
    freqDef.defaultDuration() shouldBe java.time.Duration.ofMillis(CONSOLE_FREQUENCE_DEFAULT.toMillis)
  }

  @Test
  def testDividerDefinitions(): Unit = {
    val sink = new ConsoleSink

    val dividerDef = sink.settingDefinitions().get(CONSOLE_ROW_DIVIDER)
    dividerDef.documentation() shouldBe CONSOLE_ROW_DIVIDER_DOC
    dividerDef.defaultString() shouldBe CONSOLE_ROW_DIVIDER_DEFAULT
  }
} 
Example 59
Source File: TestHDFSSinkConfig.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.connector.hdfs.sink

import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.kafka.connector.TaskSetting
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

import scala.jdk.CollectionConverters._

class TestHDFSSinkConfig extends OharaTest {
  private[this] val HDFS_URL_VALUE = "hdfs://test:9000"

  private[this] def hdfsConfig(settings: Map[String, String]): HDFSSinkProps =
    HDFSSinkProps(TaskSetting.of(settings.asJava))

  @Test
  def testGetDataDir(): Unit = {
    val hdfsSinkConfig: HDFSSinkProps = hdfsConfig(Map(HDFS_URL_KEY -> HDFS_URL_VALUE))
    hdfsSinkConfig.hdfsURL shouldBe HDFS_URL_VALUE
  }

  @Test
  def testReplication(): Unit = {
    val hdfsSinkConfig: HDFSSinkProps = hdfsConfig(
      Map(HDFS_URL_KEY -> HDFS_URL_VALUE, HDFS_REPLICATION_NUMBER_KEY -> "2")
    )
    hdfsSinkConfig.replicationNumber shouldBe 2
  }
} 
Example 60
Source File: TestIllegalFtpFileSystem.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.client.filesystem.ftp

import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.testing.service.FtpServer
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._

class TestIllegalFtpFileSystem extends OharaTest {
  private[this] val server = FtpServer.local()

  private[this] val fileSystem =
    FtpFileSystem.builder
      // log in to the ftp server with an invalid account and then see what happens :)
      .user(CommonUtils.randomString(10))
      .password(server.password)
      .hostname(server.hostname)
      .port(server.port)
      .build()

  @Test
  def testList(): Unit = an[Throwable] should be thrownBy fileSystem.listFileNames("/")

  @Test
  def testExist(): Unit = an[Throwable] should be thrownBy fileSystem.exists("/")

  @Test
  def testNonExist(): Unit = an[Throwable] should be thrownBy fileSystem.nonExists("/")

  @Test
  def testMkDir(): Unit = an[Throwable] should be thrownBy fileSystem.mkdirs(s"/${CommonUtils.randomString(10)}")

  @Test
  def testWorkingFolder(): Unit = an[Throwable] should be thrownBy fileSystem.workingFolder()

  @Test
  def testFileType(): Unit = an[Throwable] should be thrownBy fileSystem.fileType("/")

  @Test
  def testOpen(): Unit = an[Throwable] should be thrownBy fileSystem.open(s"/${CommonUtils.randomString(10)}")

  @Test
  def testWrite(): Unit = an[Throwable] should be thrownBy fileSystem.append(s"/${CommonUtils.randomString(10)}")

  @Test
  def testReMkdirs(): Unit = an[Throwable] should be thrownBy fileSystem.reMkdirs(s"/${CommonUtils.randomString(10)}")

  @Test
  def testDelete(): Unit = an[Throwable] should be thrownBy fileSystem.delete(s"/${CommonUtils.randomString(10)}")

  @Test
  def testAttach(): Unit =
    an[Throwable] should be thrownBy fileSystem.attach(s"/${CommonUtils.randomString(10)}", "abc")

  @After
  def tearDown(): Unit = {
    Releasable.close(fileSystem)
    Releasable.close(server)
  }
} 
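The `an[Throwable] should be thrownBy ...` assertions above only verify that some exception is raised. A minimal, self-contained sketch (plain ScalaTest matchers plus JUnit, not taken from ohara; the class and the division are hypothetical) showing the related matcher that also checks the exception message:

import org.junit.Test
import org.scalatest.matchers.should.Matchers._

class ExceptionMessageSketch {
  @Test
  def typeAndMessage(): Unit = {
    val zero = 0
    // only asserts the exception type
    an[ArithmeticException] should be thrownBy (1 / zero)
    // returns the thrown exception so its message can be asserted as well
    the[ArithmeticException] thrownBy (1 / zero) should have message "/ by zero"
  }
}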
Example 61
Source File: TestHdfsFileSystem.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.client.filesystem.hdfs

import java.io.{BufferedWriter, File, OutputStreamWriter}
import java.nio.charset.StandardCharsets

import oharastream.ohara.client.filesystem.{FileFilter, FileSystem, FileSystemTestBase}
import oharastream.ohara.common.exception.FileSystemException
import oharastream.ohara.common.util.CommonUtils
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

class TestHdfsFileSystem extends FileSystemTestBase {
  private[this] val tempFolder: File = CommonUtils.createTempFolder("local_hdfs")

  private[this] val hdfsURL: String = new File(tempFolder.getAbsolutePath).toURI.toString

  override protected val fileSystem: FileSystem = FileSystem.hdfsBuilder.url(hdfsURL).build

  override protected val rootDir: String = tempFolder.toString

  // override this method because the Local HDFS doesn't support append()
  @Test
  override def testAppend(): Unit = {
    val file = randomFile()
    fileSystem.create(file).close()

    intercept[FileSystemException] {
      fileSystem.append(file)
    }.getMessage shouldBe "Not supported"
  }

  // override this method because the Local HDFS doesn't support append()
  @Test
  override def testDeleteFileThatHaveBeenRead(): Unit = {
    val file              = randomFile(rootDir)
    val data: Seq[String] = Seq("123", "456")
    val writer            = new BufferedWriter(new OutputStreamWriter(fileSystem.create(file), StandardCharsets.UTF_8))
    try data.foreach(line => {
      writer.append(line)
      writer.newLine()
    })
    finally writer.close()

    fileSystem.exists(file) shouldBe true
    fileSystem.readLines(file) shouldBe data
    fileSystem.delete(file)
    fileSystem.exists(file) shouldBe false
    fileSystem.listFileNames(rootDir, FileFilter.EMPTY).size shouldBe 0
  }
} 
Example 62
Source File: TestShabondiApi.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.client.configurator

import oharastream.ohara.client.configurator.ShabondiApi.{SINK_ALL_DEFINITIONS, SOURCE_ALL_DEFINITIONS}
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.shabondi.ShabondiDefinitions._
import org.junit.Test
import org.scalatest.matchers.should.Matchers._
import spray.json._

class TestShabondiApi extends OharaTest {
  @Test
  def testSourceDefinitions(): Unit =
    ShabondiApi.SHABONDI_CLUSTER_CREATION_FORMAT
      .read(s"""
                |  {
                |    "group": "g",
                |    "name": "n",
                |    "${CLIENT_PORT_DEFINITION.key()}": 123,
                |    "${BROKER_CLUSTER_KEY_DEFINITION.key()}": {
                |      "group": "b",
                |      "name": "n"
                |    },
                |    "${NODE_NAMES_DEFINITION.key()}": ["nn"],
                |    "${SHABONDI_CLASS_DEFINITION.key()}": "${ShabondiApi.SHABONDI_SOURCE_CLASS_NAME}"
                |  }
                |""".stripMargin.parseJson)
      .definitions shouldBe SOURCE_ALL_DEFINITIONS

  @Test
  def testSinkDefinitions(): Unit =
    ShabondiApi.SHABONDI_CLUSTER_CREATION_FORMAT
      .read(s"""
                |  {
                |    "group": "g",
                |    "name": "n",
                |    "${CLIENT_PORT_DEFINITION.key()}": 123,
                |    "${BROKER_CLUSTER_KEY_DEFINITION.key()}": {
                |      "group": "b",
                |      "name": "n"
                |    },
                |    "${NODE_NAMES_DEFINITION.key()}": ["nn"],
                |    "${SHABONDI_CLASS_DEFINITION.key()}": "${ShabondiApi.SHABONDI_SINK_CLASS_NAME}"
                |  }
                |""".stripMargin.parseJson)
      .definitions shouldBe SINK_ALL_DEFINITIONS // sink creations should expose the sink definitions (SINK_ALL_DEFINITIONS assumed as the sink counterpart of SOURCE_ALL_DEFINITIONS)
} 
Example 63
Source File: TestVolumeApi.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.client.configurator

import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.ObjectKey
import oharastream.ohara.common.util.CommonUtils
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

class TestVolumeApi extends OharaTest {
  @Test
  def testOnlyPath(): Unit =
    VolumeApi.access.request
      .path(CommonUtils.randomString())
      .creation

  @Test
  def testNameInCreation(): Unit =
    VolumeApi.access.request
      .name("ab")
      .path(CommonUtils.randomString())
      .creation
      .name shouldBe "ab"

  @Test
  def testGroupInCreation(): Unit =
    VolumeApi.access.request
      .group("ab")
      .path(CommonUtils.randomString())
      .creation
      .group shouldBe "ab"

  @Test
  def testKeyInCreation(): Unit = {
    val creation = VolumeApi.access.request
      .key(ObjectKey.of("g", "n"))
      .path(CommonUtils.randomString())
      .creation
    creation.group shouldBe "g"
    creation.name shouldBe "n"
  }

  @Test
  def testPathInCreation(): Unit =
    VolumeApi.access.request
      .path("a")
      .creation
      .path shouldBe "a"

  @Test
  def testPathInUpdating(): Unit =
    VolumeApi.access.request
      .path("a")
      .updating
      .path
      .get shouldBe "a"

  @Test
  def testDefaultTagsInCreation(): Unit =
    VolumeApi.access.request
      .path(CommonUtils.randomString())
      .creation
      .tags shouldBe Map.empty

  @Test
  def testDefaultTagsInUpdating(): Unit =
    VolumeApi.access.request.updating.tags shouldBe None
} 
Example 64
Source File: TestFileInfoApi.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.client.configurator

import java.io.File

import oharastream.ohara.client.configurator.FileInfoApi.FileInfo
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.CommonUtils
import org.junit.Test
import org.scalatest.matchers.should.Matchers._
import spray.json.JsString

import scala.concurrent.ExecutionContext.Implicits.global
class TestFileInfoApi extends OharaTest {
  private[this] def access: FileInfoApi.Access = FileInfoApi.access.hostname(CommonUtils.hostname()).port(22)

  @Test
  def nullKeyInGet(): Unit =
    an[NullPointerException] should be thrownBy access.get(null)

  @Test
  def nullKeyInDelete(): Unit =
    an[NullPointerException] should be thrownBy access.delete(null)

  @Test
  def emptyName(): Unit = an[IllegalArgumentException] should be thrownBy access.request.name("")

  @Test
  def nullName(): Unit = an[NullPointerException] should be thrownBy access.request.name(null)

  @Test
  def emptyGroup(): Unit = an[IllegalArgumentException] should be thrownBy access.request.group("")

  @Test
  def nullGroup(): Unit = an[NullPointerException] should be thrownBy access.request.group(null)

  @Test
  def nullFile(): Unit = an[NullPointerException] should be thrownBy access.request.file(null)

  @Test
  def nonexistentFile(): Unit =
    an[IllegalArgumentException] should be thrownBy access.request.file(new File(CommonUtils.randomString(5)))

  @Test
  def nullTags(): Unit = an[NullPointerException] should be thrownBy access.request.tags(null)

  @Test
  def emptyTags(): Unit = access.request.tags(Map.empty)

  @Test
  def bytesMustBeEmptyAfterSerialization(): Unit = {
    val bytes = CommonUtils.randomString().getBytes()
    val fileInfo = new FileInfo(
      group = CommonUtils.randomString(),
      name = CommonUtils.randomString(),
      lastModified = CommonUtils.current(),
      bytes = bytes,
      url = None,
      classInfos = Seq.empty,
      tags = Map("a" -> JsString("b"))
    )

    val copy = FileInfoApi.FILE_INFO_FORMAT.read(FileInfoApi.FILE_INFO_FORMAT.write(fileInfo))
    copy.group shouldBe fileInfo.group
    copy.name shouldBe fileInfo.name
    copy.lastModified shouldBe fileInfo.lastModified
    copy.bytes shouldBe Array.empty
    copy.url shouldBe fileInfo.url
    copy.tags shouldBe fileInfo.tags
  }

  @Test
  def nullUrlShouldBeRemoved(): Unit = {
    val fileInfo = new FileInfo(
      group = CommonUtils.randomString(),
      name = CommonUtils.randomString(),
      lastModified = CommonUtils.current(),
      bytes = Array.emptyByteArray,
      url = None,
      classInfos = Seq.empty,
      tags = Map("a" -> JsString("b"))
    )
    FileInfoApi.FILE_INFO_FORMAT.write(fileInfo).asJsObject.fields should not contain "url"
  }
} 
Example 65
Source File: TestObjectApi.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.client.configurator

import oharastream.ohara.client.configurator.ObjectApi._
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.ObjectKey
import oharastream.ohara.common.util.CommonUtils
import org.junit.Test
import org.scalatest.matchers.should.Matchers._
import spray.json.JsString
class TestObjectApi extends OharaTest {
  @Test
  def testSetLastModified(): Unit = ObjectInfo(Map.empty, 123).lastModified shouldBe 123

  @Test
  def testEquals(): Unit =
    ObjectInfo(Map("a" -> JsString("b")), 123) shouldBe ObjectInfo(Map("a" -> JsString("b")), 123)

  @Test
  def testNameInCreation(): Unit = ObjectApi.access.request.name("ab").creation.name shouldBe "ab"

  @Test
  def testGroupInCreation(): Unit = ObjectApi.access.request.name("ab").group("ab").creation.group shouldBe "ab"

  @Test
  def testKeyInCreation(): Unit = {
    val creation = ObjectApi.access.request.key(ObjectKey.of("g", "n")).creation
    creation.group shouldBe "g"
    creation.name shouldBe "n"
  }

  @Test
  def testTagsInCreation(): Unit =
    ObjectApi.access.request.key(ObjectKey.of("g", "n")).creation.tags shouldBe Map.empty

  @Test
  def testDefaultGroup(): Unit =
    ObjectApi.access.request.name(CommonUtils.randomString(10)).creation.group shouldBe GROUP_DEFAULT
} 
Example 66
Source File: TestDatabaseClient.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.client

import oharastream.ohara.client.configurator.InspectApi.RdbColumn
import oharastream.ohara.client.database.DatabaseClient
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.testing.service.Database
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._

class TestDatabaseClient extends OharaTest {
  private[this] val db = Database.local()

  private[this] val client = DatabaseClient.builder.url(db.url()).user(db.user()).password(db.password()).build

  private[this] val increasedNumber = client.databaseType match {
    // postgresql also generates a "xxx_pkey" table for the primary key
    case "postgresql" => 2
    case _            => 1
  }
  @Test
  def testList(): Unit = {
    val before    = client.tables().size
    val tableName = CommonUtils.randomString(10)
    val cf0       = RdbColumn("cf0", "INTEGER", true)
    val cf1       = RdbColumn("cf1", "INTEGER", false)
    val cf2       = RdbColumn("cf2", "INTEGER", false)
    client.createTable(tableName, Seq(cf2, cf0, cf1))
    try {
      val after = client.tables().size
      after - before shouldBe increasedNumber
    } finally client.dropTable(tableName)
  }

  @Test
  def testCreate(): Unit = {
    // postgresql uses lower case...
    val tableName = CommonUtils.randomString(10)
    val cf0       = RdbColumn("cf0", "INTEGER", true)
    val cf1       = RdbColumn("cf1", "INTEGER", true)
    val cf2       = RdbColumn("cf2", "INTEGER", false)
    val before    = client.tables().size
    client.createTable(tableName, Seq(cf2, cf0, cf1))
    try {
      client.tables().size - before shouldBe increasedNumber
      val cfs = client.tableQuery.tableName(tableName).execute().head.columns
      cfs.size shouldBe 3
      cfs.filter(_.name == "cf0").head.pk shouldBe true
      cfs.filter(_.name == "cf1").head.pk shouldBe true
      cfs.filter(_.name == "cf2").head.pk shouldBe false
    } finally client.dropTable(tableName)
  }

  @Test
  def testDrop(): Unit = {
    val tableName = CommonUtils.randomString(10)
    val cf0       = RdbColumn("cf0", "INTEGER", true)
    val cf1       = RdbColumn("cf1", "INTEGER", false)
    client.createTable(tableName, Seq(cf0, cf1))
    val before = client.tables().size
    client.dropTable(tableName)
    before - client.tables().size shouldBe increasedNumber
  }

  @Test
  def nullUrl(): Unit = an[NullPointerException] should be thrownBy DatabaseClient.builder.url(null)

  @Test
  def emptyUrl(): Unit = an[IllegalArgumentException] should be thrownBy DatabaseClient.builder.url("")

  @Test
  def testUser(): Unit = {
    // USER is optional for jdbc, so null and empty string are both legal
    DatabaseClient.builder.user(null)
    DatabaseClient.builder.user("")
  }

  @Test
  def testPassword(): Unit = {
    // PASSWORD is optional for jdbc, so null and empty string are both legal
    DatabaseClient.builder.password(null)
    DatabaseClient.builder.password("")
  }

  @After
  def tearDown(): Unit = {
    Releasable.close(client)
    Releasable.close(db)
  }
} 
Example 67
Source File: TestVersion.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.client

import oharastream.ohara.client.configurator.{BrokerApi, WorkerApi, ZookeeperApi}
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.VersionUtils
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

class TestVersion extends OharaTest {
  @Test
  def testZookeeper(): Unit = {
    ZookeeperApi.IMAGE_NAME_DEFAULT shouldBe s"oharastream/zookeeper:${VersionUtils.VERSION}"
  }

  @Test
  def testBroker(): Unit = {
    BrokerApi.IMAGE_NAME_DEFAULT shouldBe s"oharastream/broker:${VersionUtils.VERSION}"
  }

  @Test
  def testWorker(): Unit = {
    WorkerApi.IMAGE_NAME_DEFAULT shouldBe s"oharastream/connect-worker:${VersionUtils.VERSION}"
  }
} 
Example 68
Source File: Test873.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.client.kafka

import java.util.concurrent.TimeUnit

import akka.actor.ActorSystem
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.{Http, server}
import oharastream.ohara.client.kafka.WorkerJson.{ConnectorCreationResponse, KafkaConnectorTaskId, _}
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.CommonUtils
import oharastream.ohara.kafka.connector.json.Creation
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.jdk.CollectionConverters._


class Test873 extends OharaTest {
  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(60, TimeUnit.SECONDS))

  @Test
  def testCreateConnector(): Unit = {
    val className = CommonUtils.randomString()
    val settings = Map(
      CommonUtils.randomString() -> CommonUtils.randomString()
    )
    val tasks = Seq(
      KafkaConnectorTaskId(
        connector = CommonUtils.randomString(),
        task = 10
      )
    )
    val server = toServer {
      path("connectors") {
        post {
          entity(as[Creation]) { req =>
            complete(
              ConnectorCreationResponse(
                name = req.name(),
                config = req.configs().asScala.toMap,
                tasks = tasks
              )
            )
          }
        }
      }
    }

    try {
      val connectorKey = ConnectorKey.of(CommonUtils.randomString(5), CommonUtils.randomString(5))
      val client       = ConnectorAdmin(s"${server.hostname}:${server.port}")
      val response = result(
        client.connectorCreator().connectorKey(connectorKey).settings(settings).className(className).create()
      )
      response.name shouldBe connectorKey.connectorNameOnKafka()
      response.tasks shouldBe tasks
      settings.foreach {
        case (k, v) =>
          response.config(k) shouldBe v
      }
    } finally server.close()
  }

  private[this] def toServer(route: server.Route): SimpleServer = {
    implicit val system: ActorSystem = ActorSystem("my-system")
    val server                       = Await.result(Http().bindAndHandle(route, "localhost", 0), Duration(30, TimeUnit.SECONDS))
    new SimpleServer {
      override def hostname: String = server.localAddress.getHostString
      override def port: Int        = server.localAddress.getPort
      override def close(): Unit = {
        Await.result(server.unbind(), Duration(30, TimeUnit.SECONDS))
        Await.result(system.terminate(), Duration(30, TimeUnit.SECONDS))
      }
    }
  }
} 
Example 69
Source File: TestConnectorCreator.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.client.kafka

import oharastream.ohara.common.rule.OharaTest
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

class TestConnectorCreator extends OharaTest {
  private[this] val notWorkingClient = ConnectorAdmin("localhost:2222")

  @Test
  def nullConfigs(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient.connectorCreator().settings(null)

  @Test
  def nullSchema(): Unit = an[NullPointerException] should be thrownBy notWorkingClient.connectorCreator().columns(null)

  @Test
  def nullConnectorKey(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient.connectorCreator().connectorKey(null)

  @Test
  def illegalNumberOfTasks(): Unit =
    an[IllegalArgumentException] should be thrownBy notWorkingClient.connectorCreator().numberOfTasks(-1)

  @Test
  def nullClass(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient
      .connectorCreator()
      .connectorClass(null.asInstanceOf[Class[_]])

  @Test
  def nullClassName(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient.connectorCreator().className(null.asInstanceOf[String])

  @Test
  def emptyClassName(): Unit =
    an[IllegalArgumentException] should be thrownBy notWorkingClient.connectorCreator().className("")

  @Test
  def nullTopicKey(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient.connectorCreator().topicKey(null)

  @Test
  def nullTopicKeys(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient.connectorCreator().topicKeys(null)

  @Test
  def emptyTopicKeys(): Unit =
    an[IllegalArgumentException] should be thrownBy notWorkingClient.connectorCreator().topicKeys(Set.empty)
} 
Example 70
Source File: TestConnectorValidator.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.client.kafka

import oharastream.ohara.common.rule.OharaTest
import org.junit.Test
import org.scalatest.matchers.should.Matchers._
import scala.concurrent.ExecutionContext.Implicits.global
class TestConnectorValidator extends OharaTest {
  
  private[this] val notWorkingClient = ConnectorAdmin("localhost:2222")

  @Test
  def ignoreClassName(): Unit =
    an[NoSuchElementException] should be thrownBy notWorkingClient.connectorValidator().run()

  @Test
  def nullSettingKey(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient.connectorValidator().setting(null, "asdsad")

  @Test
  def emptySettingKey(): Unit =
    an[IllegalArgumentException] should be thrownBy notWorkingClient.connectorValidator().setting("", "asdsad")

  @Test
  def nullSettingValue(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient.connectorValidator().setting("asdsad", null)

  @Test
  def emptySettingValue(): Unit =
    an[IllegalArgumentException] should be thrownBy notWorkingClient.connectorValidator().setting("asdsad", "")

  @Test
  def nullSettings(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient.connectorValidator().settings(null)

  @Test
  def emptySettings(): Unit =
    an[IllegalArgumentException] should be thrownBy notWorkingClient.connectorValidator().settings(Map.empty)

  @Test
  def nullSchema(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient.connectorValidator().columns(null)

  @Test
  def illegalNumberOfTasks(): Unit =
    an[IllegalArgumentException] should be thrownBy notWorkingClient.connectorValidator().numberOfTasks(-1)

  @Test
  def nullClass(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient
      .connectorValidator()
      .connectorClass(null.asInstanceOf[Class[_]])

  @Test
  def nullClassName(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient
      .connectorValidator()
      .className(null.asInstanceOf[String])

  @Test
  def emptyClassName(): Unit =
    an[IllegalArgumentException] should be thrownBy notWorkingClient.connectorValidator().className("")

  @Test
  def nullTopicKey(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient.connectorValidator().topicKey(null)

  @Test
  def nullTopicKeys(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient.connectorValidator().topicKeys(null)

  @Test
  def emptyTopicKeys(): Unit =
    an[IllegalArgumentException] should be thrownBy notWorkingClient.connectorValidator().topicKeys(Set.empty)
} 
Example 71
Source File: TestWorkerJson.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.client.kafka

import oharastream.ohara.client.kafka.WorkerJson._
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.CommonUtils
import oharastream.ohara.kafka.connector.json.{Creation, Validation}
import org.junit.Test
import org.scalatest.matchers.should.Matchers._
import spray.json._
class TestWorkerJson extends OharaTest {
  @Test
  def testValidatedValue(): Unit = {
    val validatedValue = KafkaValidatedValue(
      name = CommonUtils.randomString(5),
      value = Some("String"),
      errors = Seq(CommonUtils.randomString(5), CommonUtils.randomString(5))
    )
    KAFKA_VALIDATED_VALUE_FORMAT.read(KAFKA_VALIDATED_VALUE_FORMAT.write(validatedValue)) shouldBe validatedValue
  }

  @Test
  def testValidatedValueFromString(): Unit = {
    val name           = CommonUtils.randomString(5)
    val value          = CommonUtils.randomString(5)
    val error          = CommonUtils.randomString(5)
    val validatedValue = KAFKA_VALIDATED_VALUE_FORMAT.read(s"""
                                               |{
                                               |  "name":"$name",
                                               |  "value":"$value",
                                               |  "errors":["$error", "$error"]
                                               |}
                                            """.stripMargin.parseJson)
    validatedValue.name shouldBe name
    validatedValue.value shouldBe Some(value)
    validatedValue.errors shouldBe Seq(error, error)
  }

  @Test
  def testValidatedValueFromStringWithoutValue(): Unit = {
    val name           = CommonUtils.randomString(5)
    val error          = CommonUtils.randomString(5)
    val validatedValue = KAFKA_VALIDATED_VALUE_FORMAT.read(s"""
                                                        |{
                                                        |  "name":"$name",
                                                        |  "errors":["$error", "$error"]
                                                        |}
                                            """.stripMargin.parseJson)
    validatedValue.name shouldBe name
    validatedValue.value shouldBe None
    validatedValue.errors shouldBe Seq(error, error)
  }

  @Test
  def testValidatedValueFromStringWithEmptyValue(): Unit = {
    val name           = CommonUtils.randomString(5)
    val error          = CommonUtils.randomString(5)
    val validatedValue = KAFKA_VALIDATED_VALUE_FORMAT.read(s"""
                                                        |{
                                                        |  "name":"$name",
                                                        |  "value":"",
                                                        |  "errors":["$error", "$error"]
                                                        |}
                                            """.stripMargin.parseJson)
    validatedValue.name shouldBe name
    validatedValue.value shouldBe None
    validatedValue.errors shouldBe Seq(error, error)
  }

  @Test
  def testValidatedValueFromStringWithNullValue(): Unit = {
    val name           = CommonUtils.randomString(5)
    val error          = CommonUtils.randomString(5)
    val validatedValue = KAFKA_VALIDATED_VALUE_FORMAT.read(s"""
                                                        |{
                                                        |  "name":"$name",
                                                        |  "value":null,
                                                        |  "errors":["$error", "$error"]
                                                        |}
                                            """.stripMargin.parseJson)
    validatedValue.name shouldBe name
    validatedValue.value shouldBe None
    validatedValue.errors shouldBe Seq(error, error)
  }

  @Test
  def testCreation(): Unit = {
    val creation = Creation.of(CommonUtils.randomString(), CommonUtils.randomString(), CommonUtils.randomString())
    creation shouldBe CREATION_FORMAT.read(CREATION_FORMAT.write(creation))
  }

  @Test
  def testValidation(): Unit = {
    val validation = Validation.of(java.util.Map.of(CommonUtils.randomString(), CommonUtils.randomString()))
    validation shouldBe KAFKA_VALIDATION_FORMAT.read(KAFKA_VALIDATION_FORMAT.write(validation))
  }
} 
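The write-then-read round trip used above generalizes to any type with a spray-json format. A minimal sketch (not part of ohara; the `Sample` case class and test class are hypothetical) using a format derived with `jsonFormat2`:

import org.junit.Test
import org.scalatest.matchers.should.Matchers._
import spray.json.DefaultJsonProtocol._
import spray.json.RootJsonFormat

// hypothetical payload used only for this sketch
final case class Sample(name: String, count: Int)

class TestSampleRoundTrip {
  private[this] val format: RootJsonFormat[Sample] = jsonFormat2(Sample)

  @Test
  def testRoundTrip(): Unit = {
    val sample = Sample(name = "abc", count = 42)
    // serialize to JSON and parse it back; the copy should equal the original
    format.read(format.write(sample)) shouldBe sample
  }
}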
Example 72
Source File: ChronoUnitTest.scala    From scala-js-java-time   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package org.scalajs.testsuite.javalib.time.temporal

import java.time.temporal.ChronoUnit

import org.junit.Test
import org.junit.Assert._

class ChronoUnitTest {
  import ChronoUnit._

  @Test def test_isDurationEstimated(): Unit = {
    for (u <- ChronoUnit.values)
      assertTrue(u.isDurationEstimated != u.isTimeBased)
  }

  @Test def test_isDateBased(): Unit = {
    assertFalse(NANOS.isDateBased)
    assertFalse(MICROS.isDateBased)
    assertFalse(MILLIS.isDateBased)
    assertFalse(SECONDS.isDateBased)
    assertFalse(MINUTES.isDateBased)
    assertFalse(HOURS.isDateBased)
    assertFalse(HALF_DAYS.isDateBased)
    assertTrue(DAYS.isDateBased)
    assertTrue(WEEKS.isDateBased)
    assertTrue(MONTHS.isDateBased)
    assertTrue(YEARS.isDateBased)
    assertTrue(DECADES.isDateBased)
    assertTrue(CENTURIES.isDateBased)
    assertTrue(MILLENNIA.isDateBased)
    assertTrue(ERAS.isDateBased)
    assertFalse(FOREVER.isDateBased)
  }

  @Test def test_isTimeBased(): Unit = {
    assertTrue(NANOS.isTimeBased)
    assertTrue(MICROS.isTimeBased)
    assertTrue(MILLIS.isTimeBased)
    assertTrue(SECONDS.isTimeBased)
    assertTrue(MINUTES.isTimeBased)
    assertTrue(HOURS.isTimeBased)
    assertTrue(HALF_DAYS.isTimeBased)
    assertFalse(DAYS.isTimeBased)
    assertFalse(WEEKS.isTimeBased)
    assertFalse(MONTHS.isTimeBased)
    assertFalse(YEARS.isTimeBased)
    assertFalse(DECADES.isTimeBased)
    assertFalse(CENTURIES.isTimeBased)
    assertFalse(MILLENNIA.isTimeBased)
    assertFalse(ERAS.isTimeBased)
    assertFalse(FOREVER.isTimeBased)
  }

  @Test def test_values(): Unit = {
    val units = Array[AnyRef](NANOS, MICROS, MILLIS, SECONDS, MINUTES, HOURS,
        HALF_DAYS, DAYS, WEEKS, MONTHS, YEARS, DECADES, CENTURIES, MILLENNIA,
        ERAS, FOREVER)
    assertArrayEquals(units, values.asInstanceOf[Array[AnyRef]])
  }

  @Test def test_valueOf(): Unit = {
    assertEquals(NANOS, valueOf("NANOS"))
    assertEquals(MICROS, valueOf("MICROS"))
    assertEquals(MILLIS, valueOf("MILLIS"))
    assertEquals(SECONDS, valueOf("SECONDS"))
    assertEquals(MINUTES, valueOf("MINUTES"))
    assertEquals(HOURS, valueOf("HOURS"))
    assertEquals(HALF_DAYS, valueOf("HALF_DAYS"))
    assertEquals(DAYS, valueOf("DAYS"))
    assertEquals(WEEKS, valueOf("WEEKS"))
    assertEquals(MONTHS, valueOf("MONTHS"))
    assertEquals(YEARS, valueOf("YEARS"))
    assertEquals(DECADES, valueOf("DECADES"))
    assertEquals(CENTURIES, valueOf("CENTURIES"))
    assertEquals(MILLENNIA, valueOf("MILLENNIA"))
    assertEquals(ERAS, valueOf("ERAS"))
    assertEquals(FOREVER, valueOf("FOREVER"))
  }
} 
Example 73
Source File: TemporalTest.scala    From scala-js-java-time   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package org.scalajs.testsuite.javalib.time

import java.time.temporal._

import org.junit.Test
import org.junit.Assert._
import org.scalajs.testsuite.utils.AssertThrows._

abstract class TemporalTest[Temp <: Temporal] extends TemporalAccessorTest[Temp] {
  import DateTimeTestUtil._

  def isSupported(unit: ChronoUnit): Boolean

  val sampleLongs = Seq(
      Long.MinValue, Int.MinValue.toLong, -1000000000L, -86400L,
      -3600L, -366L, -365L, -60L, -24L, -7L, -1L, 0L,
      1L, 7L, 24L, 60L, 365L, 366L, 3600L, 86400L, 1000000000L,
      Int.MaxValue.toLong, Long.MaxValue)

  @Test def isSupported_TemporalUnit(): Unit = {
    for {
      temporal <- samples
      unit <- ChronoUnit.values
    } {
      if (isSupported(unit))
        assertTrue(temporal.isSupported(unit))
      else
        assertFalse(temporal.isSupported(unit))
    }
    for (temporal <- samples)
      assertFalse(temporal.isSupported(null: TemporalUnit))
  }

  @Test def with_unsupported_field(): Unit = {
    for {
      temporal <- samples
      field <- ChronoField.values if !temporal.isSupported(field)
      n <- sampleLongs.filter(field.range.isValidValue)
    } {
      expectThrows(classOf[UnsupportedTemporalTypeException],
          temporal.`with`(field, n))
    }
  }

  @Test def plus_unsupported_unit(): Unit = {
    for {
      temporal <- samples
      unit <- ChronoUnit.values if !temporal.isSupported(unit)
      n <- sampleLongs
    } {
      expectThrows(classOf[UnsupportedTemporalTypeException],
          temporal.plus(n, unit))
    }
  }

  @Test def minus(): Unit = {
    for {
      temporal <- samples
      unit <- ChronoUnit.values if temporal.isSupported(unit)
      n <- sampleLongs
    } {
      testDateTime(temporal.minus(n, unit)) {
        if (n != Long.MinValue) temporal.plus(-n, unit)
        else temporal.plus(Long.MaxValue, unit).plus(1, unit)
      }
    }
  }

  @Test def minus_unsupported_unit(): Unit = {
    for {
      temporal <- samples
      unit <- ChronoUnit.values if !temporal.isSupported(unit)
      n <- sampleLongs
    } {
      expectThrows(classOf[UnsupportedTemporalTypeException],
          temporal.minus(n, unit))
    }
  }

  @Test def until_unsupported_unit(): Unit = {
    for {
      temporal1 <- samples
      temporal2 <- samples
      unit <- ChronoUnit.values if !temporal1.isSupported(unit)
    } {
      expectThrows(classOf[UnsupportedTemporalTypeException],
          temporal1.until(temporal2, unit))
    }
  }
} 
Example 74
Source File: TemporalAccessorTest.scala    From scala-js-java-time   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package org.scalajs.testsuite.javalib.time

import java.time.DateTimeException
import java.time.temporal._

import org.junit.Test
import org.junit.Assert._
import org.scalajs.testsuite.utils.AssertThrows._

abstract class TemporalAccessorTest[TempAcc <: TemporalAccessor] {
  val samples: Seq[TempAcc]

  def isSupported(field: ChronoField): Boolean

  @Test def isSupported_TemporalField(): Unit = {
    for {
      accessor <- samples
      field <- ChronoField.values
    } {
      if (isSupported(field))
        assertTrue(accessor.isSupported(field))
      else
        assertFalse(accessor.isSupported(field))
    }
    for (accessor <- samples)
      assertFalse(accessor.isSupported(null))
  }

  def expectedRangeFor(accessor: TempAcc, field: TemporalField): ValueRange = field.range()

  @Test final def range(): Unit = {
    for {
      accessor <- samples
      field <- ChronoField.values
    } {
      if (accessor.isSupported(field)) {
        val expected = expectedRangeFor(accessor, field)
        assertEquals(expected, accessor.range(field))
      } else {
        expectThrows(classOf[UnsupportedTemporalTypeException], accessor.range(field))
      }
    }
  }

  @Test def get(): Unit = {
    for {
      accessor <- samples
      field <- ChronoField.values
    } {
      if (accessor.isSupported(field) && field.range.isIntValue)
        assertEquals(accessor.getLong(field), accessor.get(field).toLong)
      else if (accessor.isSupported(field))
        expectThrows(classOf[DateTimeException], accessor.get(field))
      else
        expectThrows(classOf[UnsupportedTemporalTypeException], accessor.get(field))
    }
  }

  @Test def getLong_unsupported_field(): Unit = {
    for {
      accessor <- samples
      field <- ChronoField.values() if !accessor.isSupported(field)
    } {
      expectThrows(classOf[UnsupportedTemporalTypeException],
          accessor.getLong(field))
    }
  }
} 
Example 75
Source File: TemporalAmountTest.scala    From scala-js-java-time   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package org.scalajs.testsuite.javalib.time

import java.time.temporal.{UnsupportedTemporalTypeException, ChronoUnit, TemporalAmount}

import org.junit.Test
import org.junit.Assert._
import org.scalajs.testsuite.utils.AssertThrows._

abstract class TemporalAmountTest {
  val samples: Seq[TemporalAmount]

  val units: Seq[ChronoUnit]

  @Test def test_get_unsupported_unit(): Unit = {
    val illegalUnits = ChronoUnit.values.filterNot(units.contains)
    for {
      amount <- samples
      unit <- illegalUnits
    } {
      expectThrows(classOf[UnsupportedTemporalTypeException], amount.get(unit))
    }
  }

  @Test def test_getUnits(): Unit = {
    for (amount <- samples)
      assertArrayEquals(units.toArray[AnyRef], amount.getUnits.toArray())
  }
} 
Example 76
Source File: IsoEraTest.scala    From scala-js-java-time   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package org.scalajs.testsuite.javalib.time.chrono

import java.time.DateTimeException
import java.time.chrono.IsoEra
import java.time.temporal.ChronoField

import org.junit.Test
import org.junit.Assert._
import org.scalajs.testsuite.javalib.time.TemporalAccessorTest
import org.scalajs.testsuite.utils.AssertThrows._

class IsoEraTest extends TemporalAccessorTest[IsoEra] {
  import IsoEra._

  val samples = values.toSeq

  def isSupported(field: ChronoField): Boolean =
    field == ChronoField.ERA

  @Test def test_getValue(): Unit = {
    assertEquals(0, BCE.getValue)
    assertEquals(1, CE.getValue)
  }

  @Test def test_getLong(): Unit = {
    for (era <- samples)
      assertEquals(era.getValue.toLong, era.getLong(ChronoField.ERA))
  }

  @Test def test_compareTo(): Unit = {
    assertEquals(0, BCE.compareTo(BCE))
    assertTrue(BCE.compareTo(CE) < 0)
    assertTrue(CE.compareTo(BCE) > 0)
    assertEquals(0, CE.compareTo(CE))
  }

  @Test def test_values(): Unit = {
    val eras = Array[AnyRef](BCE, CE)
    assertArrayEquals(eras, values.asInstanceOf[Array[AnyRef]])
  }

  @Test def test_valueOf(): Unit = {
    assertEquals(BCE, valueOf("BCE"))
    assertEquals(CE, valueOf("CE"))
    expectThrows(classOf[IllegalArgumentException], valueOf(""))
  }

  @Test def test_of(): Unit = {
    assertEquals(BCE, of(0))
    assertEquals(CE, of(1))

    for (n <- Seq(Int.MinValue, -1, 2, Int.MaxValue))
      expectThrows(classOf[DateTimeException], of(n))
  }
} 
Example 77
Source File: ChronologyTest.scala    From scala-js-java-time   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package org.scalajs.testsuite.javalib.time.chrono

import java.time.DateTimeException
import java.time.chrono.{IsoChronology, Chronology}

import org.junit.Test
import org.junit.Assert._
import org.scalajs.testsuite.utils.AssertThrows._

class ChronologyTest {
  import Chronology._

  @Test def test_of(): Unit = {
    assertEquals(IsoChronology.INSTANCE, of("ISO"))
    expectThrows(classOf[DateTimeException], of(""))
  }

  @Test def test_getAvailableChronologies(): Unit = {
    val chronologies = Chronology.getAvailableChronologies
    assertTrue(chronologies.contains(IsoChronology.INSTANCE))
  }
} 
Example 78
Source File: SelectHiveQLByJDBCTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.hive

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class SelectHiveQLByJDBCTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/hive/SelectHiveQLByJDBC.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("SelectHiveQLByJdbcTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 79
Source File: PutHiveModeTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.hive

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.PropertyUtil
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class PutHiveModeTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/hive/PutHiveMode.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("PutHiveModeTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 80
Source File: SelectHiveQLTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.hive

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.PropertyUtil
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class SelectHiveQLTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/hive/SelectHiveQL.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("SelectHiveQLTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 81
Source File: PutHiveStreamingTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.hive

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.PropertyUtil
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class PutHiveStreamingTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/hive/PutHiveStreaming.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("PutHiveStreamingTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 82
Source File: PutHiveQLTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.hive

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.PropertyUtil
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class PutHiveQLTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/hive/PutHiveQL.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("PutHiveQLTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 83
Source File: JsonFolderParserTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.Json

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class JsonFolderParserTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/json/jsonFolder.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()


    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[12]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }


} 
Example 84
Source File: JsonParserTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.Json

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class JsonParserTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/json/jsonParser.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()


    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[12]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }


} 
Example 85
Source File: JsonSaveTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.Json

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class JsonSaveTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/json/jsonSave.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()


    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[12]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }


} 
Example 86
Source File: JsonStringParserTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.Json

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class JsonStringParserTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/json/jsonStringParser.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()


    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[12]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }


} 
Example 87
Source File: MysqlReadTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.JDBC

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class MysqlReadTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/jdbc/MysqlRead.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()
    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("MysqlReadTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 88
Source File: MysqlReadIncrementalTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.JDBC

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.PropertyUtil
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class MysqlReadIncrementalTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/jdbc/MysqlReadIncremental.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("MysqlReadIncrementalTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 89
Source File: MysqlWriteTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.JDBC

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.PropertyUtil
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class MysqlWriteTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/jdbc/MysqlWrite.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("MysqlWriteTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 90
Source File: OracleWriteTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.JDBC

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.PropertyUtil
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class OracleWriteTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/jdbc/OracleWrite.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("OracleWriteTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 91
Source File: JdbcReadFromOracleTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.JDBC

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.PropertyUtil
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class JdbcReadFromOracleTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/jdbc/JdbcReadFromOracle.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val ip = InetAddress.getLocalHost.getHostAddress

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("JdbcReadFromOracleTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 92
Source File: OracleReadTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.JDBC

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class OracleReadTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/jdbc/OracleRead.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("OracleReadTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 93
Source File: OracleReadByPartitionTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.JDBC

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.PropertyUtil
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class OracleReadByPartitionTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/jdbc/OracleReadByPartition.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("OracleReadByPartitionTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 94
Source File: ExecuteShellTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.script

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class ExecuteShellTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/script/shell.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()


    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[12]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }


} 
Example 95
Source File: PythonTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.script

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.ServerIpUtil
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON


class PythonTest {

  @Test
  def testPython() : Unit = {
    //parse flow json
    val file = "src/main/resources/flow/script/python.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    //execute flow
    val spark = SparkSession.builder()
      .master("local")
      //.master("spark://10.0.86.89:7077")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      //.config("spark.yarn.appMasterEnv.PYSPARK_PYTHON","/usr/bin/python3")
      //.config("spark.jars","/opt/project/piflow/piflow-bundle/lib/jython-standalone-2.7.1.jar")
      .enableHiveSupport()
      .getOrCreate()

    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    spark.close();
    h2Server.stop()
  }

 } 
Example 96
Source File: ExecuteScalaTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.script

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil, ScalaExecutorUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class ExecuteScalaTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/script/scala.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val scalaExecutorJarList = ScalaExecutorUtil.buildScalaExcutorJar(flowBean)

    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[3]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }


} 
Example 97
Source File: PythonWithDataFrameTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.script

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.ServerIpUtil
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON


class PythonWithDataFrameTest {


  @Test
  def testPythonWithDataFrame() : Unit = {
    //parse flow json
    val file = "src/main/resources/flow/script/pythonWithDataFrame.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    //execute flow
    val spark = SparkSession.builder()
      .master("local")
      //.master("spark://10.0.86.89:7077")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      //.config("spark.yarn.appMasterEnv.PYSPARK_PYTHON","/usr/bin/python3")
      //.config("spark.jars","/opt/project/piflow/piflow-bundle/lib/jython-standalone-2.7.1.jar")
      .enableHiveSupport()
      .getOrCreate()

    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    spark.close();
    h2Server.stop()
  }


} 
Example 98
Source File: RunCypherTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.neo4j

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class RunCypherTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/neo4j/RunCypher.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("RunCypherTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 99
Source File: PutNeo4jTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.neo4j

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class PutNeo4jTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/neo4j/PutNeo4j.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("HiveToNeo4jTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 100
Source File: HiveToNeo4jTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.neo4j

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class HiveToNeo4jTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/neo4j/HiveToNeo4j.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("HiveToNeo4jTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 101
Source File: LoadFromFtpToHDFSTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.ftp

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.PropertyUtil
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class LoadFromFtpToHDFSTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/ftp/LoadFromFtpToHDFS.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("CsvParserTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris", PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }


} 
Example 102
Source File: emblTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.ftp

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class emblTest {

  @Test
  def testEmblDataParse(): Unit ={

    //parse flow json
//    val file = "src/main/resources/yqd/down.json"
//val file = "src/main/resources/yqd/refseq_genome.json"
//val file = "src/main/resources/yqd/select_unzip.json"
val file = "src/main/resources/microorganism/gene.json"

    val flowJsonStr = FileUtil.fileReader(file)

    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("yarn")
      .appName("test18")
      .config("spark.deploy.mode","client")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "4")
      .config("hive.metastore.uris","thrift://10.0.88.64:9083")
      .config("spark.yarn.am.extraJavaOptions","-Dhdp.version=2.6.5.0-292")
      .config("spark.hadoop.yarn.resourcemanager.address","master2.packone:8050")
      .config("spark.hadoop.fs.defaultFS","hdfs://master2.packone:8020")
      .config("spark.jars","/git_1225/out/artifacts/piflow/piflow.jar")
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "hdfs://10.0.86.89:9000/xjzhu/piflow/checkpoints/")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }



} 
Example 103
Source File: UploadToFtpTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.ftp

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.PropertyUtil
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class UploadToFtpTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/ftp/UploadToFtp.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("CsvParserTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris", PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }


} 
Example 104
Source File: GetMemcacheTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.memcached

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class GetMemcacheTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/memcache/getMemcache.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val h2Server = Server.createTcpServer("-tcp","-tcpAllowOthers","-tcpPort","50001").start()


    //execute flow
    val spark = SparkSession.builder()
      .master("spark://10.0.86.89:7077")
      .appName("piflow-hive-bundle")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("spark.jars","/root/Desktop/gitWORK/out/artifacts/piflow_bundle/piflow-bundle.jar")
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "hdfs://10.0.86.89:9000/xjzhu/piflow/checkpoints/")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }
  @Test
  def testFlow2json() = {

    //parse flow json
    val file = "src/main/resources/flow.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]

    //create flow
    val flowBean = FlowBean(map)
    val flowJson = flowBean.toJson()
    println(flowJson)
  }

} 
Example 105
Source File: PutMemcacheTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.memcached

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class PutMemcacheTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/memcache/putMemcache.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val h2Server = Server.createTcpServer("-tcp","-tcpAllowOthers","-tcpPort","50001").start()


    //execute flow
    val spark = SparkSession.builder()
      .master("spark://10.0.86.89:7077")
      .appName("piflow-hive-bundle")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("spark.jars","/root/Desktop/gitWORK/out/artifacts/piflow_bundle/piflow-bundle.jar")
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "hdfs://10.0.86.89:9000/xjzhu/piflow/checkpoints/")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }
  @Test
  def testFlow2json() = {

    //parse flow json
    val file = "src/main/resources/flow.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]

    //create flow
    val flowBean = FlowBean(map)
    val flowJson = flowBean.toJson()
    println(flowJson)
  }

} 
Example 106
Source File: Complement.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.memcached

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class Complement {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/memcache/ComplementByMemcache.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val h2Server = Server.createTcpServer("-tcp","-tcpAllowOthers","-tcpPort","50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("spark://10.0.86.89:7077")
      .appName("piflow-hive-bundle")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("spark.jars","/root/Desktop/gitWORK/out/artifacts/piflow_bundle/piflow-bundle.jar")
      .enableHiveSupport()
      .getOrCreate()


    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "hdfs://10.0.86.89:9000/xjzhu/piflow/checkpoints/")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }
  @Test
  def testFlow2json() = {

    //parse flow json
    val file = "src/main/resources/flow.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]

    //create flow
    val flowBean = FlowBean(map)
    val flowJson = flowBean.toJson()
    println(flowJson)
  }

} 
Example 107
Source File: LabelPropagationTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.graphx

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class LabelPropagationTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/graphx/labelpropagation.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()


    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[12]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }


} 
Example 108
Source File: LoadGraph.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.graphx

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class LoadGraph {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/graphx/LoadGraph.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()


    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[12]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }


} 
Example 109
Source File: ReadHbaseTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.hbase

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class ReadHbaseTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/hbase/ReadHbase.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()
    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("MysqlReadTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 110
Source File: PutHbaseTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.hbase

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class PutHbaseTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/hbase/PutHbase.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()
    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())

    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("MysqlReadTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 111
Source File: ReadFromRedisTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.redis

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class ReadFromRedisTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/redis/ReadFromRedis.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val ip = InetAddress.getLocalHost.getHostAddress

    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("CsvParserTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }


} 
Example 112
Source File: WriteToRedisTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.redis

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class WriteToRedisTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/redis/WriteToRedis.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()

    val ip = InetAddress.getLocalHost.getHostAddress

    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort", "50001").start()

    //execute flow
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("CsvParserTest")
      .config("spark.driver.memory", "1g")
      .config("spark.executor.memory", "2g")
      .config("spark.cores.max", "2")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 113
Source File: GetUrlTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.http

import java.io.{BufferedReader, InputStreamReader, PrintWriter}
import java.net.{HttpURLConnection, InetAddress, URL, URLConnection}

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.http.client.methods.{CloseableHttpResponse, HttpGet}
import org.apache.http.impl.client.HttpClients
import org.apache.http.util.EntityUtils
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class GetUrlTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/http/getUrl.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()


    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[12]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 114
Source File: PostUrlTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.http

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class PostUrlTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/http/postUrl.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()


    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[12]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 115
Source File: XmlSaveTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.xml

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class XmlSaveTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/xml/xmlSave.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()


    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[12]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }


} 
Example 116
Source File: XmlParserTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.xml

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class XmlParserTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/xml/xmlParser.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()


    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[12]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }


} 
Example 117
Source File: XmlStringTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.xml

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class XmlStringTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/xml/xmlStringParser.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()


    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[12]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }


} 
Example 118
Source File: XmlParserColumnsTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.xml

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class XmlParserColumnsTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/xml/xmlParserColumns.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()


    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[12]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }


} 
Example 119
Source File: XmlParserFolderTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.xml

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class XmlParserFolderTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/xml/xmlParserFolder.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()


    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[12]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }


} 
Example 120
Source File: FileTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.file

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class FileTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/file/file.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()


    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[12]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }


} 
Example 121
Source File: RegexTextProcessTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.file

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class RegexTextProcessTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/file/regexTextProcess.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()


    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[12]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }


} 
Example 122
Source File: PutEsTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.elasticsearch

import java.net.InetAddress

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class PutEsTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/es/PutEs.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()


    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[12]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }


} 
Example 123
Source File: CertificateTest.scala    From Sidechains-SDK   with MIT License 5 votes vote down vote up
package com.horizen.block

import com.horizen.utils.BytesUtils
import org.junit.Test
import org.scalatest.junit.JUnitSuite
import org.junit.Assert._

// TODO: extend this test with real cases for: certificate with no withdrawals, with 1, with multiple.
class CertificateTest extends JUnitSuite {

  @Test
  def emptyCertificate(): Unit = {
    //zen-cli.exe -datadir=../examples/simpleapp/target/tmp/node0 -regtest  send_certificate "0000000000000000000000000000000000000000000000000000000000000001" 0 7 "0dd47302855895dd9087b490db6f49621ff40a11fa136de9bfc776a17f814dbe" "54a3666bbc880c5bc0a0116faf62546e1933a0a639e9a727df1b1ec6b9c24d55c2be2ded2a84b1bfb491c7d2b12f136821645a8738e518f863346fadecece2e27c168dfc3f4b08215c664529fd4f44b2d5ffd22be81c3516c354b2c0cd6900002b7caa68c280e046961e20e4b72c2a38950a2791d178e244ebf8b2ac3148aba079ba184b4fe8f1f3b03c96ec66fa809bc4f0f755c2ff31251a7326bce8b08b6ea1b18c5194a0b08b2e982ea4ade43b098874bd57fb29102afe17d1c82b55010000fc0dddf1e7266c7e5cabc735342cf372b77ab6a440739357602951f199c2926c99e6dc1bd8f0473a95416032379ecad974f53804c2705f89423e418cbd98ba6f332b821e585afc0f31d8fe6c1d3ac93b28e1b769285c269ddc4f9657e3140000a4fdd3a1152f1a14decdfae4f9200eac4d12e46285f4bea7871de070744ec2a92de84dabaf1581dd846adb3bdb43e0031a794064be2204d1a2559c8200880f215e03c536d04de51c743d0ed42d33ec726c2747985d49a0ce9c545eae361f0000f9584651cea77ed2fb19a81a576693418549791fcc29a5f6c63652d4dd2228420ec6443d1621e41859214db0d9970b40c69023414fb4ca793286b37125483b630e88f3cb6386e87aee7017aee11d0c077f189c3272f0ad9424ca28376e7c00009576152e60203a4e37c40cfb132c4ad85291cbd869784ff4551f3d12498140bff247ebad5622e64d54e4c0f010f7f38e83d390d49af868fef2266ed2891541ccfd117a3b5e13da711c02bdae446b7ebfee3fa9d666880d57f78c8b12540c00000056673e23231f244c099bffb1f573d8e427ea10dfdd94c86612fe7a3a90ed4c3595db4f98a3f0c4cf0cae96699bf7383c33b58413b5181c278933a5bf730f2a18895830a9775218a9544dbd7f2bba5276c9676cef84e59fe0de27e9e55a5f000021637675c076fd4488c06f76b0765da43c766c0b9f859d9af777ab5ec10b2a5d76453f8da0695a15ec5d25bcc870385fd4ee7620c59ab75d2643f68ed96f2439718756353fb48c0435f7cafc1fb41bf1c9fb660e9b27cbf7548db65f9fac010000" "[]" 0.00001
    val emptyCertHex: String = "fbffffff0100000000000000000000000000000000000000000000000000000000000000000000000700000000000000be4d817fa176c7bfe96d13fa110af41f62496fdb90b48790dd9558850273d40d54a3666bbc880c5bc0a0116faf62546e1933a0a639e9a727df1b1ec6b9c24d55c2be2ded2a84b1bfb491c7d2b12f136821645a8738e518f863346fadecece2e27c168dfc3f4b08215c664529fd4f44b2d5ffd22be81c3516c354b2c0cd6900002b7caa68c280e046961e20e4b72c2a38950a2791d178e244ebf8b2ac3148aba079ba184b4fe8f1f3b03c96ec66fa809bc4f0f755c2ff31251a7326bce8b08b6ea1b18c5194a0b08b2e982ea4ade43b098874bd57fb29102afe17d1c82b55010000fc0dddf1e7266c7e5cabc735342cf372b77ab6a440739357602951f199c2926c99e6dc1bd8f0473a95416032379ecad974f53804c2705f89423e418cbd98ba6f332b821e585afc0f31d8fe6c1d3ac93b28e1b769285c269ddc4f9657e3140000a4fdd3a1152f1a14decdfae4f9200eac4d12e46285f4bea7871de070744ec2a92de84dabaf1581dd846adb3bdb43e0031a794064be2204d1a2559c8200880f215e03c536d04de51c743d0ed42d33ec726c2747985d49a0ce9c545eae361f0000f9584651cea77ed2fb19a81a576693418549791fcc29a5f6c63652d4dd2228420ec6443d1621e41859214db0d9970b40c69023414fb4ca793286b37125483b630e88f3cb6386e87aee7017aee11d0c077f189c3272f0ad9424ca28376e7c00009576152e60203a4e37c40cfb132c4ad85291cbd869784ff4551f3d12498140bff247ebad5622e64d54e4c0f010f7f38e83d390d49af868fef2266ed2891541ccfd117a3b5e13da711c02bdae446b7ebfee3fa9d666880d57f78c8b12540c00000056673e23231f244c099bffb1f573d8e427ea10dfdd94c86612fe7a3a90ed4c3595db4f98a3f0c4cf0cae96699bf7383c33b58413b5181c278933a5bf730f2a18895830a9775218a9544dbd7f2bba5276c9676cef84e59fe0de27e9e55a5f000021637675c076fd4488c06f76b0765da43c766c0b9f859d9af777ab5ec10b2a5d76453f8da0695a15ec5d25bcc870385fd4ee7620c59ab75d2643f68ed96f2439718756353fb48c0435f7cafc1fb41bf1c9fb660e9b27cbf7548db65f9fac010000010b22133a9a03fa5a137ace06923b8bb0aa290b10f540401176e4a3e3290ea195000000006a4730440220633f69f5adef62ee1cac3d0ad760bebc5cb2a1e91d9468fb2ca9cea8be96734402206f61be8eca4760d1bbfaaac70b7c472f9a2beb083d4c4386e081702aeac97fbf0121025052fe8be792cab34422ef3cc8f61aa5b3713767469eb61d0dc12b17929af3d9ffffffff0160677d01000000003c76a914429575e060e663fc4047fcd976342f5a09381bd088ac20bb1acf2c1fc1228967a611c7db30632098f0c641855180b5fe23793b72eea50d00b400"

    val bytes: Array[Byte] = BytesUtils.fromHexString(emptyCertHex)

    val cert: WithdrawalEpochCertificate = WithdrawalEpochCertificate.parse(bytes, 0)

    assertEquals("Certificate epoch number is different.", 0, cert.epochNumber)
    assertEquals("Certificate end block hash is different.", "0dd47302855895dd9087b490db6f49621ff40a11fa136de9bfc776a17f814dbe", BytesUtils.toHexString(cert.endEpochBlockHash))
    assertEquals("Certificate sidechain id is wrong", "0000000000000000000000000000000000000000000000000000000000000001", BytesUtils.toHexString(cert.sidechainId))
    assertEquals("Version is wrong", -5, cert.version)
    assertEquals("Quality is wrong", 7, cert.quality)
    assertEquals("Transaction input size is wrong", 1, cert.transactionInputs.size)
    assertEquals("Transaction output size is wrong", 1, cert.transactionOutputs.size)

    assertEquals("Certificate backward transfer amount is different.", 0, cert.backwardTransferOutputs.size)
  }

  @Test
  def nonEmptyCertificate(): Unit = {
    //zen-cli.exe -datadir=../examples/simpleapp/target/tmp/node0 -regtest  send_certificate "0000000000000000000000000000000000000000000000000000000000000001" 1 7 "059650e8dddcc8522d76af0d087c1d478b2668fc4cfbf3cb492dd022bce6f315" "edb5861923c765115a1fefe8b5e7889ac74910efc38e3a786bd5af0f6e39e5420723a16fcf3a25de8904acfed2aa74e4af28a8080e5dfecb77d03fbb5e0247efb411b65de7bb868148c399206a3ea2eaabcf947e158d5ebcdbafab63d60f010032e89bf4f9471c49ea33e8a0264570abd4e6dc856c056840f4f3cb682e592b647c31f4368b82d3deaa1d30e5de8b7e361f8334897077138527ae3409d524fb65c911ad34423445973fbaf75c382b04e46464ea0132e913473f2ef273cc1d010000589f3cba217f63f18b3342884ae2ebf42f1e772e9f4aa44fb67c0de329024e64a06cede33570a78d02cea6fba5c6a67943d74f54942fc30e8c6a912ebd69bb4ccf454392d1c0f59d662efa0ac46296bc38cee155436d1ffb3c294870e3d6000008c24b9b9c9e8437bf46fffdb80f6af6fc75049c13f5757f640a19039844607e2a291d8f97dd9d11271193d74a609f597c8abd6b43e2b078deed673aa0ccd04af560ece8d2a405b2f35a446704953d95b919761ba5e16d300b8685d02cad0000444097c37a61991c3f893e3dac1d175b585a9402bda770c0edf6199980c9f724a09e2c20e422710d05e2d4fd89223235d8e2913e38cd8624bffdb00052ebcbe0a5c58fb14e90de51e4c4c10fb2490bfaf9d1b902b82d8d36de81a4d4759200009fcb82a7d504fda53d6ad313ec56243712d74933d1e420a3b72bc1036e22575626367a60f81dc785cf2b578459065fd30aa7410d4628cf7d6a7637236367732913f985206bc126120895e4b767366580dda8ae86e79dfaf633a8d6e5a97200000024700b59d475404ea400762d6abc8009d24c7071d91fa521ebab8ce37755a5a2e25b5f1c4f61fc72f4341f6482cc22707e8b127aecb60673d1c504f0aa040a5ae8f26a6b8e4a90eaef2c84cdea954a2680f9aaec161c70241b7db24dc9a500007de59666f4008df78308a9880f904c171c32a5c8363bb025628c2f5d9cfaa714b8bf01c58e55416c888453de3288cee2587cfb700be7046fc248a4abf3770631c639cc81d84c756cc3e7dc009f7f898604305058f6eb32db022abcb96fb2010000" "[{\"pubkeyhash\":\"d01b38095a2f3476d9fc4740fc63e660e0759542\",\"amount\":\"7\"},{\"pubkeyhash\":\"a1a69c439e033b0eb3feb4dcbdd54ad2a4bc715a\",\"amount\":\"3\"}]" 0.00001
    val nonEmptyCertHex: String = "fbffffff010000000000000000000000000000000000000000000000000000000000000001000000070000000000000015f3e6bc22d02d49cbf3fb4cfc68268b471d7c080daf762d52c8dcdde8509605edb5861923c765115a1fefe8b5e7889ac74910efc38e3a786bd5af0f6e39e5420723a16fcf3a25de8904acfed2aa74e4af28a8080e5dfecb77d03fbb5e0247efb411b65de7bb868148c399206a3ea2eaabcf947e158d5ebcdbafab63d60f010032e89bf4f9471c49ea33e8a0264570abd4e6dc856c056840f4f3cb682e592b647c31f4368b82d3deaa1d30e5de8b7e361f8334897077138527ae3409d524fb65c911ad34423445973fbaf75c382b04e46464ea0132e913473f2ef273cc1d010000589f3cba217f63f18b3342884ae2ebf42f1e772e9f4aa44fb67c0de329024e64a06cede33570a78d02cea6fba5c6a67943d74f54942fc30e8c6a912ebd69bb4ccf454392d1c0f59d662efa0ac46296bc38cee155436d1ffb3c294870e3d6000008c24b9b9c9e8437bf46fffdb80f6af6fc75049c13f5757f640a19039844607e2a291d8f97dd9d11271193d74a609f597c8abd6b43e2b078deed673aa0ccd04af560ece8d2a405b2f35a446704953d95b919761ba5e16d300b8685d02cad0000444097c37a61991c3f893e3dac1d175b585a9402bda770c0edf6199980c9f724a09e2c20e422710d05e2d4fd89223235d8e2913e38cd8624bffdb00052ebcbe0a5c58fb14e90de51e4c4c10fb2490bfaf9d1b902b82d8d36de81a4d4759200009fcb82a7d504fda53d6ad313ec56243712d74933d1e420a3b72bc1036e22575626367a60f81dc785cf2b578459065fd30aa7410d4628cf7d6a7637236367732913f985206bc126120895e4b767366580dda8ae86e79dfaf633a8d6e5a97200000024700b59d475404ea400762d6abc8009d24c7071d91fa521ebab8ce37755a5a2e25b5f1c4f61fc72f4341f6482cc22707e8b127aecb60673d1c504f0aa040a5ae8f26a6b8e4a90eaef2c84cdea954a2680f9aaec161c70241b7db24dc9a500007de59666f4008df78308a9880f904c171c32a5c8363bb025628c2f5d9cfaa714b8bf01c58e55416c888453de3288cee2587cfb700be7046fc248a4abf3770631c639cc81d84c756cc3e7dc009f7f898604305058f6eb32db022abcb96fb2010000011c5b538885385d7004b258fc2dc72e3fc7a9c85ef592c84c4330bc9c4e6bb13d000000006a47304402206bfb62078dc4212152a5fbf56ad991a0028697312a4249bd00f3de700bd28986022020bbdd841fd1e3d5cec38cc42b60c34cdb5709d2cf9e1d553bfada2463460dd101210351e6fabaca6d7c63134b3e01154748c3c83ecfd12c05223ad86bee3c2f4fda4bffffffff0178637d01000000003c76a91418cb0d5876875433b1fff0d5bb4c7123331a58c388ac20bb1acf2c1fc1228967a611c7db30632098f0c641855180b5fe23793b72eea50d00b4020027b92900000000429575e060e663fc4047fcd976342f5a09381bd000a3e111000000005a71bca4d24ad5bddcb4feb30e3b039e439ca6a1"

    val bytes: Array[Byte] = BytesUtils.fromHexString(nonEmptyCertHex)

    val cert: WithdrawalEpochCertificate = WithdrawalEpochCertificate.parse(bytes, 0)

    assertEquals("Certificate epoch number is different.", 1, cert.epochNumber)
    assertEquals("Certificate end block hash is different.", "059650e8dddcc8522d76af0d087c1d478b2668fc4cfbf3cb492dd022bce6f315", BytesUtils.toHexString(cert.endEpochBlockHash))
    assertEquals("Certificate sidechain id is wrong", "0000000000000000000000000000000000000000000000000000000000000001", BytesUtils.toHexString(cert.sidechainId))
    assertEquals("Quality is wrong", 7, cert.quality)
    assertEquals("Version is wrong", -5, cert.version)

    assertEquals("Transaction input size is wrong", 1, cert.transactionInputs.size)
    assertEquals("Transaction output size is wrong", 1, cert.transactionOutputs.size)

    assertEquals("Certificate backward transfer amount is different.", 2, cert.backwardTransferOutputs.size)
    assertEquals("Amount for first backward transfer is wrong", 700000000, cert.backwardTransferOutputs.head.amount)
    assertEquals("Output bytes for first backward transfer are wrong", "0027b92900000000429575e060e663fc4047fcd976342f5a09381bd0", BytesUtils.toHexString(cert.backwardTransferOutputs.head.outputBytes))
    assertEquals("Pub key hash for second backward transfer are wrong", "5a71bca4d24ad5bddcb4feb30e3b039e439ca6a1", BytesUtils.toHexString(cert.backwardTransferOutputs(1).pubKeyHash))
  }
} 
Example 124
Source File: PublicKey25519PropositionTest.scala    From Sidechains-SDK   with MIT License 5 votes vote down vote up
package com.horizen.proposition

import com.fasterxml.jackson.databind.JsonNode
import com.horizen.serialization.ApplicationJsonSerializer
import com.horizen.utils.Ed25519
import org.junit.Assert.assertEquals
import org.junit.Test
import org.scalatest.junit.JUnitSuite
import scorex.core.utils.ScorexEncoder

class PublicKey25519PropositionScalaTest
  extends JUnitSuite
{

  @Test
  def testToJson(): Unit = {
    val seed = "12345".getBytes
    val keyPair = Ed25519.createKeyPair(seed)
    val privateKey = keyPair.getKey
    val publicKey = keyPair.getValue

    val prop1 = new PublicKey25519Proposition(publicKey)

    val serializer = ApplicationJsonSerializer.getInstance()
    serializer.setDefaultConfiguration()

    val jsonStr = serializer.serialize(prop1)

    val node : JsonNode = serializer.getObjectMapper().readTree(jsonStr)

    assertEquals("Json must contain only 1 publicKey.",
      1, node.findValues("publicKey").size())
    assertEquals("PublicKey json value must be the same.",
      ScorexEncoder.default.encode(prop1.pubKeyBytes()), node.path("publicKey").asText())
  }
} 
Example 125
Source File: Signature25519ScalaTest.scala    From Sidechains-SDK   with MIT License 5 votes vote down vote up
package com.horizen.proof


import com.fasterxml.jackson.databind.JsonNode
import com.horizen.secret.PrivateKey25519Creator
import com.horizen.serialization.ApplicationJsonSerializer
import org.junit.Assert.assertEquals
import org.junit.Test
import org.scalatest.junit.JUnitSuite
import scorex.core.utils.ScorexEncoder

class Signature25519ScalaTest
  extends JUnitSuite
{

  @Test
  def testToJson(): Unit = {
    val testMessage: Array[Byte] = "Test string message to sign/verify.".getBytes
    val seed = "12345".getBytes
    val key = PrivateKey25519Creator.getInstance.generateSecret(seed)
    val prp = key.publicImage
    val pr = key.sign(testMessage)

    val serializer = ApplicationJsonSerializer.getInstance()
    serializer.setDefaultConfiguration()

    val jsonStr = serializer.serialize(pr)

    val node : JsonNode = serializer.getObjectMapper().readTree(jsonStr)

    assertEquals("Json must contain only 1 signature.",
      1, node.findValues("signature").size())
    assertEquals("",
      ScorexEncoder.default.encode(pr.signatureBytes), node.path("signature").asText())
  }
} 
Example 126
Source File: SidechainSecretsCompanionTest.scala    From Sidechains-SDK   with MIT License 5 votes vote down vote up
package com.horizen.companion

import org.scalatest.junit.JUnitSuite
import org.junit.Test
import org.junit.Assert._
import com.horizen.fixtures._
import com.horizen.customtypes._
import com.horizen.secret._
import java.util.{HashMap => JHashMap}
import java.lang.{Byte => JByte}

import com.horizen.SidechainTypes

class SidechainSecretsCompanionTest
  extends JUnitSuite
  with SecretFixture
  with SidechainTypes
{

  val customSecretSerializers: JHashMap[JByte, SecretSerializer[SidechainTypes#SCS]] = new JHashMap()
  customSecretSerializers.put(CustomPrivateKey.SECRET_TYPE_ID, CustomPrivateKeySerializer.getSerializer.asInstanceOf[SecretSerializer[SidechainTypes#SCS]])

  val sidechainSecretsCompanion = SidechainSecretsCompanion(customSecretSerializers)
  val sidechainSecretsCompanionCore = SidechainSecretsCompanion(new JHashMap())

  @Test def testCore(): Unit = {
    val secret = getPrivateKey25519

    val secretBytes = sidechainSecretsCompanion.toBytes(secret)

    assertNotEquals("Secret must have core type.", secretBytes(0), Byte.MaxValue)
    assertEquals("Secret must have registered core typeId.", secretBytes(0), secret.secretTypeId())
    assertEquals("Deserialization must return same Secret.", secret, sidechainSecretsCompanion.parseBytesTry(secretBytes).get)
  }

  @Test def testRegisteredCustom(): Unit = {
    val customSecret = getCustomPrivateKey

    val customSecretBytes = sidechainSecretsCompanion.toBytes(customSecret)

    assertEquals("Secret must have custom type.", customSecretBytes(0), Byte.MaxValue)
    assertEquals("Secret must have registered custom typeId.", customSecretBytes(1), customSecret.secretTypeId())
    assertEquals("Deserialization must return same Secret.", customSecret, sidechainSecretsCompanion.parseBytesTry(customSecretBytes).get)
  }

  @Test def testUnregisteredCustom(): Unit = {
    val customSecret = getCustomPrivateKey
    var exceptionThrown = false


    // Test 1: try to serialize custom type Secret. Serialization exception expected, because of custom type is unregistered.
    try {
      sidechainSecretsCompanionCore.toBytes(customSecret)
    } catch {
      case _ : Throwable => exceptionThrown = true
    }

    assertTrue("Exception during serialization for unregistered type of Secret must be thrown.", exceptionThrown)


    // Test 2: try to deserialize custom type Secret. Serialization exception expected, because of custom type is unregistered.
    exceptionThrown = false
    val customSecretBytes = sidechainSecretsCompanion.toBytes(customSecret)

    try {
      sidechainSecretsCompanionCore.parseBytesTry(customSecretBytes).get
    } catch {
      case _ : Throwable => exceptionThrown = true
    }

    assertTrue("Exception during deserialization for unregistered type of Secret must be thrown.", exceptionThrown)
  }
} 
Example 127
Source File: SidechainBoxesCompanionTest.scala    From Sidechains-SDK   with MIT License 5 votes vote down vote up
package com.horizen.companion

import org.scalatest.junit.JUnitSuite
import org.junit.Test
import org.junit.Assert._
import com.horizen.fixtures._
import com.horizen.customtypes._
import com.horizen.box._
import com.horizen.proposition._
import java.util.{HashMap => JHashMap}
import java.lang.{Byte => JByte}

import com.horizen.SidechainTypes

class SidechainBoxesCompanionTest
  extends JUnitSuite
  with BoxFixture
  with SidechainTypes
{

  var customBoxesSerializers: JHashMap[JByte, BoxSerializer[SidechainTypes#SCB]] = new JHashMap()
  customBoxesSerializers.put(CustomBox.BOX_TYPE_ID, CustomBoxSerializer.getSerializer.asInstanceOf[BoxSerializer[SidechainTypes#SCB]])

  val sidechainBoxesCompanion = SidechainBoxesCompanion(customBoxesSerializers)
  val sidechainBoxesCompanionCore = SidechainBoxesCompanion(new JHashMap())

  @Test
  def testCore(): Unit = {
    // Test 1: RegularBox serialization/deserialization
    val regularBox = getRegularBox

    val regularBoxBytes = sidechainBoxesCompanion.toBytes(regularBox)

    assertEquals("Type of serialized box must be RegularBox.", regularBox.boxTypeId(), regularBoxBytes(0))
    assertEquals("Deserialization must restore same box.", regularBox, sidechainBoxesCompanion.parseBytesTry(regularBoxBytes).get)


    // Test 2: CertifierRightBox serialization/deserialization
    val certifiedRightBox = getCertifierRightBox

    val certifiedRightBoxBytes = sidechainBoxesCompanion.toBytes(certifiedRightBox)

    assertEquals("Type of serialized box must be CertifierRightBox.", certifiedRightBox.boxTypeId(), certifiedRightBoxBytes(0))
    assertEquals("Deserialization must restore same box.", certifiedRightBox, sidechainBoxesCompanion.parseBytesTry(certifiedRightBoxBytes).get)


    // Test 3: ForgerBox serialization/deserialization
    val forgerBox = getForgerBox

    val forgerBoxBytes = sidechainBoxesCompanion.toBytes(forgerBox)

    assertEquals("Type of serialized box must be ForgerBox.", forgerBox.boxTypeId(), forgerBoxBytes(0))
    assertEquals("Deserialization must restore same box.", forgerBox, sidechainBoxesCompanion.parseBytesTry(forgerBoxBytes).get)
  }

  @Test
  def testRegisteredCustom(): Unit = {
    val customBox = getCustomBox.asInstanceOf[SidechainTypes#SCB]

    val customBoxBytes = sidechainBoxesCompanion.toBytes(customBox)
    assertEquals("Box type must be custom.", Byte.MaxValue, customBoxBytes(0))
    assertEquals("Type of serialized box must be CustomBox.", customBox.boxTypeId(), customBoxBytes(1))
    assertEquals("Deserialization must restore same box.", customBox, sidechainBoxesCompanion.parseBytesTry(customBoxBytes).get)
  }

  @Test
  def testUnregisteredCustom(): Unit = {
    val customBox = getCustomBox.asInstanceOf[SidechainTypes#SCB]
    var exceptionThrown = false


    // Test 1: try to serialize custom type Box. Serialization exception expected, because of custom type is unregistered.
    try {
      sidechainBoxesCompanionCore.toBytes(customBox)
    } catch {
      case _ : Throwable => exceptionThrown = true
    }

    assertTrue("Exception must be thrown for unregistered box type.", exceptionThrown)


    // Test 2: try to deserialize custom type Box. Serialization exception expected, because of custom type is unregistered.
    exceptionThrown = false
    val customBoxBytes = sidechainBoxesCompanion.toBytes(customBox)

    try {
      sidechainBoxesCompanionCore.parseBytesTry(customBoxBytes).get
    } catch {
      case _ : Throwable => exceptionThrown = true
    }

    assertTrue("Exception must be thrown for unregistered box type.", exceptionThrown)
  }
} 
Example 128
Source File: HistoryConsensusCheckerTest.scala    From Sidechains-SDK   with MIT License 5 votes vote down vote up
package com.horizen.consensus

import java.util.Random

import com.horizen.SidechainHistory
import com.horizen.fixtures.sidechainblock.generation._
import com.horizen.params.{NetworkParams, TestNetParams}
import org.junit.Test
import org.scalatest.junit.JUnitSuite

import scala.collection.mutable
import scala.util.{Failure, Success, Try}


class HistoryConsensusCheckerTest extends JUnitSuite with HistoryConsensusChecker {

  def testWithSeed(testSeed: Int): Unit = {
    //val testSeed = 234
    val rnd: Random = new Random(testSeed)

    val initialParams = TestNetParams(consensusSlotsInEpoch = 10, sidechainGenesisBlockTimestamp = 1333344452L)
    val (params, genesisBlock, genesisGenerator, genesisForgingData, genesisEndEpochInfo) = SidechainBlocksGenerator.startSidechain(10000000000L, testSeed, initialParams)
    val history: SidechainHistory = createHistory(params, genesisBlock, genesisEndEpochInfo)
    val nonce = history.calculateNonceForEpoch(blockIdToEpochId(genesisBlock.id))
    val stake = genesisEndEpochInfo.stakeConsensusEpochInfo
    history.applyFullConsensusInfo(genesisBlock.id, FullConsensusEpochInfo(stake, nonce))
    println(s"//////////////// Genesis epoch ${genesisBlock.id} had been ended ////////////////")

    val generators = mutable.IndexedSeq(genesisGenerator)

    (1 to 50)
      .foldLeft[(SidechainHistory, mutable.IndexedSeq[SidechainBlocksGenerator])]((history, generators)) { (acc, index) =>
        val currentHistory: SidechainHistory = acc._1
        val currentGenerators: mutable.IndexedSeq[SidechainBlocksGenerator] =  acc._2

        val nextGenerator: SidechainBlocksGenerator = generatorSelection(rnd, currentGenerators)
        val nextCorrectGenerationRules: GenerationRules = GenerationRules.generateCorrectGenerationRules(rnd, nextGenerator.getNotSpentBoxes)

        println("try to add incorrect block(s)")
        tryToAddIncorrectBlocks(params, currentHistory, nextGenerator, nextCorrectGenerationRules, rnd)
        println("try to add correct block")
        val correctRes = Try(generateBlock(nextCorrectGenerationRules, nextGenerator, history)) match {
          case Success((gens, generatedBlock)) =>
            val updatedHistory = historyUpdateShallBeSuccessful(currentHistory, generatedBlock)
            val updatedGenerators = currentGenerators ++ gens
            (updatedHistory, updatedGenerators)

          case Failure(ex: GenerationIsNoLongerPossible) =>
            println("Finishing block generation")
            return

          case Failure(ex) =>
            println("Error during block generation")
            throw ex
        }

        correctRes
      }
  }

  private def tryToAddIncorrectBlocks(params: NetworkParams,
                                      currentHistory: SidechainHistory,
                                      currentGenerator: SidechainBlocksGenerator,
                                      correctGenerationRules: GenerationRules,
                                      rnd: Random,
                                      incorrectBlocksCount: Int = 2): Unit = Try {
    (1 to incorrectBlocksCount)
      .foreach{ _ =>
        val incorrectGenerationRules: GenerationRules = CorruptedGenerationRules.corruptGenerationRules(rnd, params, currentGenerator, correctGenerationRules)
        //println(s"Generated corruption rules are: ${incorrectGenerationRules}")
        currentGenerator
          .tryToGenerateBlockForCurrentSlot(incorrectGenerationRules)
          .map(generationInfo => historyUpdateShallBeFailed(currentHistory,generationInfo.block, incorrectGenerationRules))
    }
  }

  @Test
  def testManySeeds(): Unit = {
    val seed = 9084

    (50 to 50).foreach{index =>
      println(s"SEED IS ${index}")
      testWithSeed(index + seed)
    }
  }

} 
Example 129
Source File: ConsensusDataStorageTest.scala    From Sidechains-SDK   with MIT License 5 votes vote down vote up
package com.horizen.consensus

import com.horizen.storage.InMemoryStorageAdapter
import com.horizen.utils._
import org.junit.Test
import scorex.util._
import org.junit.Assert._

import scala.util.Random


class ConsensusDataStorageTest {

  @Test
  def simpleTest(): Unit = {
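    // Store 100 randomly generated epoch ids with stake and nonce info, then check that lookups hit for stored ids and miss for unknown ids.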
    val rnd = new Random(23)

    val storage = new ConsensusDataStorage(new InMemoryStorageAdapter())

    val stakeData: Map[ConsensusEpochId, StakeConsensusEpochInfo] = (1 to 100).map{ _ =>
      val id = blockIdToEpochId(bytesToId(Utils.doubleSHA256Hash(rnd.nextLong().toString.getBytes)))
      val stakeInfo =
        StakeConsensusEpochInfo(Utils.doubleSHA256Hash(rnd.nextLong().toString.getBytes).take(merkleTreeHashLen), rnd.nextLong())
      (id, stakeInfo)
    }.toMap

    stakeData.foreach{case (id, stake) => storage.addStakeConsensusEpochInfo(id, stake)}

    assertTrue(stakeData.forall{case (id, stake) => storage.getStakeConsensusEpochInfo(id).get == stake})
    assertTrue(stakeData.forall{case (id, _) =>
      val nonExistingId = blockIdToEpochId(bytesToId(Utils.doubleSHA256Hash(id.getBytes())))
      storage.getStakeConsensusEpochInfo(nonExistingId).isEmpty
    })

    val nonceData: Map[ConsensusEpochId, NonceConsensusEpochInfo] = (1 to 100).map{ _ =>
      val id = blockIdToEpochId(bytesToId(Utils.doubleSHA256Hash(rnd.nextLong().toString.getBytes)))
      val nonceInfo =
        NonceConsensusEpochInfo(byteArrayToConsensusNonce(Utils.doubleSHA256Hash(rnd.nextLong().toString.getBytes)))
      (id, nonceInfo)
    }.toMap

    nonceData.foreach{case (id, nonceInfo) => storage.addNonceConsensusEpochInfo(id, nonceInfo)}

    assertTrue(nonceData.forall{case (id, nonce) => storage.getNonceConsensusEpochInfo(id).get == nonce})
    assertTrue(nonceData.forall{case (id, _) =>
      val nonExistingId = blockIdToEpochId(bytesToId(Utils.doubleSHA256Hash(id.getBytes())))
      storage.getNonceConsensusEpochInfo(nonExistingId).isEmpty
    })
  }
} 
Example 130
Source File: KafkaStringEventBatchTest.scala    From maha   with Apache License 2.0 5 votes vote down vote up
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.log



import java.util.Collections

import com.yahoo.maha.data.{Compressor, StringEventBatch}
import org.junit.{Assert, Test}

class KafkaStringEventBatchTest {
  @Test
  def testWithNoConfiguration(): Unit = {
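    // With empty configuration maps the serializer and deserializer use their defaults; a three-event batch should survive the round trip unchanged.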
    val encoder = new KafkaStringEventBatchSerializer()
    encoder.configure(Collections.emptyMap(), false)
    val decoder = new KafkaStringEventBatchDeserializer()
    decoder.configure(Collections.emptyMap(), false)
    val builder = new StringEventBatch.Builder(3)
    builder.add("one")
    builder.add("two")
    builder.add("three")
    val recordList = builder.build.asInstanceOf[StringEventBatch]
    val encoded = encoder.serialize("blah", recordList)
    val decoded = decoder.deserialize("blah", encoded)
    Assert.assertEquals(recordList.getEvents.size(), decoded.getEvents.size())
    Assert.assertEquals(recordList.getEvents.get(0), decoded.getEvents.get(0))
    Assert.assertEquals(recordList.getEvents.get(1), decoded.getEvents.get(1))
    Assert.assertEquals(recordList.getEvents.get(2), decoded.getEvents.get(2))
  }

  @Test
  def testWithConfiguration(): Unit = {
    import scala.collection.JavaConverters._
    val config: java.util.Map[String, _] = Map(
      Compressor.COMPRESSOR_CODEC_PROPERTY -> "lz4hc"
      , KafkaStringEventBatchSerializer.BUFFER_MB_PROPERTY -> "2"
      , KafkaStringEventBatchDeserializer.BUFFER_MB_PROPERTY -> "2"
    ).asJava
    val encoder = new KafkaStringEventBatchSerializer()
    encoder.configure(config, false)
    val decoder = new KafkaStringEventBatchDeserializer()
    decoder.configure(config, false)
    val builder = new StringEventBatch.Builder(3)
    builder.add("one")
    builder.add("two")
    builder.add("three")
    val recordList = builder.build.asInstanceOf[StringEventBatch]
    val encoded = encoder.serialize("blah", recordList)
    val decoded = decoder.deserialize("blah", encoded)
    Assert.assertEquals(recordList.getEvents.size(), decoded.getEvents.size())
    Assert.assertEquals(recordList.getEvents.get(0), decoded.getEvents.get(0))
    Assert.assertEquals(recordList.getEvents.get(1), decoded.getEvents.get(1))
    Assert.assertEquals(recordList.getEvents.get(2), decoded.getEvents.get(2))
  }

  @Test
  def testWithBadConfiguration(): Unit = {
    import scala.collection.JavaConverters._
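    // "blah", "abc" and "-1" are invalid codec/buffer settings; the serializer and deserializer are expected to fall back to defaults, so the round trip below should still succeed.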
    val config: java.util.Map[String, _] = Map(
      Compressor.COMPRESSOR_CODEC_PROPERTY -> "blah"
      , KafkaStringEventBatchSerializer.BUFFER_MB_PROPERTY -> "abc"
      , KafkaStringEventBatchDeserializer.BUFFER_MB_PROPERTY -> "-1"
    ).asJava
    val encoder = new KafkaStringEventBatchSerializer()
    encoder.configure(config, false)
    val decoder = new KafkaStringEventBatchDeserializer()
    decoder.configure(config, false)
    val builder = new StringEventBatch.Builder(3)
    builder.add("one")
    builder.add("two")
    builder.add("three")
    val recordList = builder.build.asInstanceOf[StringEventBatch]
    val encoded = encoder.serialize("blah", recordList)
    val decoded = decoder.deserialize("blah", encoded)
    Assert.assertEquals(recordList.getEvents.size(), decoded.getEvents.size())
    Assert.assertEquals(recordList.getEvents.get(0), decoded.getEvents.get(0))
    Assert.assertEquals(recordList.getEvents.get(1), decoded.getEvents.get(1))
    Assert.assertEquals(recordList.getEvents.get(2), decoded.getEvents.get(2))
  }
} 
Example 131
Source File: KryoStringEventBatchTest.scala    From maha   with Apache License 2.0 5 votes vote down vote up
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.log



import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{Input, Output}
import com.yahoo.maha.data.StringEventBatch
import org.junit.{Assert, Test}

class KryoStringEventBatchTest {
  @Test
  @throws[Exception]
  def test(): Unit = {
    val kryo = new Kryo
    kryo.register(classOf[StringEventBatch], new KryoStringEventBatch)
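    // Build a three-event batch, write it with Kryo, then read it back and verify the events.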
    val builder = new StringEventBatch.Builder(3)
    builder.add("one")
    builder.add("two")
    builder.add("three")
    val recordList = builder.build.asInstanceOf[StringEventBatch]
    val output = new Output(new Array[Byte](1024 * 1024 + 1))
    kryo.writeObject(output, recordList)
    System.out.println("output.position=" + output.position)
    val input = new Input(output.getBuffer, 0, output.total.toInt)
    val resultRecordList = kryo.readObject(input, classOf[StringEventBatch])
    Assert.assertEquals(resultRecordList.getEvents.get(0), "one")
    Assert.assertEquals(resultRecordList.getEvents.get(1), "two")
    Assert.assertEquals(resultRecordList.getEvents.get(2), "three")
    val output2 = new Output(new Array[Byte](1024 * 1024 + 1))
    kryo.writeObject(output2, resultRecordList)
  }
} 
Example 132
Source File: UtilsTest.scala    From spark-http-stream   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
import java.sql.Date

import org.apache.spark.SparkConf
import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.sql.SparkSession
import org.junit.Assert
import org.junit.Test
import java.io.ByteArrayOutputStream
import java.io.InputStream
import org.apache.commons.io.IOUtils
import com.esotericsoftware.kryo.io.Input
import org.apache.spark.sql.execution.streaming.http.KryoSerializerUtils

class UtilsTest {
	@Test
	def testKryoSerDe() {
		val d1 = new Date(30000);
		val bytes = KryoSerializerUtils.serialize(d1);
		val d2 = KryoSerializerUtils.deserialize(bytes);
		Assert.assertEquals(d1, d2);

		val d3 = Map('x' -> Array("aaa", "bbb"), 'y' -> Array("ccc", "ddd"));
		println(d3);
		val bytes2 = KryoSerializerUtils.serialize(d3);
		val d4 = KryoSerializerUtils.deserialize(bytes2).asInstanceOf[Map[String, Any]];
		println(d4);
	}

	@Test
	def testEncoderSchema() {
		val spark = SparkSession.builder.master("local[4]")
			.getOrCreate();
		val sqlContext = spark.sqlContext;
		import sqlContext.implicits._
		import org.apache.spark.sql.catalyst.encoders.encoderFor
		val schema1 = encoderFor[String].schema;
		val schema2 = encoderFor[(String)].schema;
		val schema3 = encoderFor[((String))].schema;

		Assert.assertEquals(schema1, schema2);
		Assert.assertEquals(schema1, schema3);
	}

	@Test
	def testDateInTuple() {
		val spark = SparkSession.builder.master("local[4]")
			.getOrCreate();
		val sqlContext = spark.sqlContext;
		import sqlContext.implicits._

		val d1 = new Date(30000);
		val ds = sqlContext.createDataset(Seq[(Int, Date)]((1, d1)));
		val d2 = ds.collect()(0)._2;

		//NOTE: d1!=d2, maybe a bug
		println(d1.equals(d2));
	}
} 
Example 133
Source File: HttpStreamServerClientTest.scala    From spark-http-stream   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
import org.apache.spark.SparkConf
import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.sql.Row
import org.apache.spark.sql.execution.streaming.http.HttpStreamClient
import org.junit.Assert
import org.junit.Test
import org.apache.spark.sql.types.LongType
import org.apache.spark.sql.types.IntegerType
import org.apache.spark.sql.types.DoubleType
import org.apache.spark.sql.types.BooleanType
import org.apache.spark.sql.types.FloatType
import org.apache.spark.sql.types.StringType
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.types.StructField
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.ByteType
import org.apache.spark.sql.execution.streaming.http.HttpStreamServer
import org.apache.spark.sql.execution.streaming.http.StreamPrinter
import org.apache.spark.sql.execution.streaming.http.HttpStreamServerSideException


class HttpStreamServerClientTest {
	val ROWS1 = Array(Row("hello1", 1, true, 0.1f, 0.1d, 1L, '1'.toByte),
		Row("hello2", 2, false, 0.2f, 0.2d, 2L, '2'.toByte),
		Row("hello3", 3, true, 0.3f, 0.3d, 3L, '3'.toByte));

	val ROWS2 = Array(Row("hello"),
		Row("world"),
		Row("bye"),
		Row("world"));

	@Test
	def testHttpStreamIO() {
		//starts a http server
		val kryoSerializer = new KryoSerializer(new SparkConf());
		val server = HttpStreamServer.start("/xxxx", 8080);

		val spark = SparkSession.builder.appName("testHttpTextSink").master("local[4]")
			.getOrCreate();
		spark.conf.set("spark.sql.streaming.checkpointLocation", "/tmp/");

		val sqlContext = spark.sqlContext;
		import spark.implicits._
		//add a local message buffer to server, with 2 topics registered
		server.withBuffer()
			.addListener(new StreamPrinter())
			.createTopic[(String, Int, Boolean, Float, Double, Long, Byte)]("topic-1")
			.createTopic[String]("topic-2");

		val client = HttpStreamClient.connect("http://localhost:8080/xxxx");
		//tests schema of topics
		val schema1 = client.fetchSchema("topic-1");
		Assert.assertArrayEquals(Array[Object](StringType, IntegerType, BooleanType, FloatType, DoubleType, LongType, ByteType),
			schema1.fields.map(_.dataType).asInstanceOf[Array[Object]]);

		val schema2 = client.fetchSchema("topic-2");
		Assert.assertArrayEquals(Array[Object](StringType),
			schema2.fields.map(_.dataType).asInstanceOf[Array[Object]]);

		//prepare to consume messages
		val sid1 = client.subscribe("topic-1")._1;
		val sid2 = client.subscribe("topic-2")._1;

		//produces some data
		client.sendRows("topic-1", 1, ROWS1);

		val sid4 = client.subscribe("topic-1")._1;
		val sid5 = client.subscribe("topic-2")._1;

		client.sendRows("topic-2", 1, ROWS2);

		//consumes data
		val fetched = client.fetchStream(sid1).map(_.originalRow);
		Assert.assertArrayEquals(ROWS1.asInstanceOf[Array[Object]], fetched.asInstanceOf[Array[Object]]);
		//it is empty now
		Assert.assertArrayEquals(Array[Object](), client.fetchStream(sid1).map(_.originalRow).asInstanceOf[Array[Object]]);
		Assert.assertArrayEquals(ROWS2.asInstanceOf[Array[Object]], client.fetchStream(sid2).map(_.originalRow).asInstanceOf[Array[Object]]);
		Assert.assertArrayEquals(Array[Object](), client.fetchStream(sid4).map(_.originalRow).asInstanceOf[Array[Object]]);
		Assert.assertArrayEquals(ROWS2.asInstanceOf[Array[Object]], client.fetchStream(sid5).map(_.originalRow).asInstanceOf[Array[Object]]);
		Assert.assertArrayEquals(Array[Object](), client.fetchStream(sid5).map(_.originalRow).asInstanceOf[Array[Object]]);

		client.unsubscribe(sid4);
		try {
			client.fetchStream(sid4);
			//exception should be thrown, because subscriber id is invalidated
			Assert.assertTrue(false);
		}
		catch {
			case e: Throwable ⇒
				e.printStackTrace();
				Assert.assertEquals(classOf[HttpStreamServerSideException], e.getClass);
		}

		server.stop();
	}
} 
Example 134
Source File: BruteForceTicTacToeStrategyTest.scala    From fx-tictactoe   with Apache License 2.0 5 votes vote down vote up
package net.ladstatt.tictactoe

import org.junit.Assert._
import org.junit.Test



// calcNextTurn and MiddleCenter are assumed to be provided by the package's
// BruteForceTicTacToeStrategy object and its move model.
class BruteForceTicTacToeStrategyTest {

  import BruteForceTicTacToeStrategy._

  @Test def testPlaceInTheMiddle(): Unit = {
    val g = TicTacToe(
      """o--
        |---
        |---""".stripMargin)
    calcNextTurn(g) match {
      case None => fail()
      case Some(m) => assertEquals(MiddleCenter, m)
    }
  }

} 
Example 135
Source File: ResourceInjectionTest.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime.router2

import javax.naming.ConfigurationException

import com.google.inject.Injector
import com.google.inject.ProvisionException
import com.typesafe.scalalogging.StrictLogging
import org.junit.Test

trait ResourceInjectionTest extends StrictLogging {
  def injector: Injector

  @Test
  def routerInjection(): Unit = {
    injector.getProvider(classOf[NaptimePlayRouter])
  }

  @Test
  def resourceInjection(): Unit = {
    val naptimePlayRouter = try {
      Some(injector.getInstance(classOf[NaptimePlayRouter]))
    } catch {
      case e: ConfigurationException =>
        logger.warn(s"No instance of 'NaptimePlayRouter' bound. Skipping router2 tests.", e)
        None
      case e: ProvisionException =>
        logger.error("Encountered an exception provisioning 'NaptimePlayRouter'.", e)
        None
    }

    for {
      router <- naptimePlayRouter
      resource <- router.naptimeRoutes.routerBuilders
    } {
      injector.getProvider(resource.resourceClass())
      logger.debug(s"Resource ${resource.resourceClass().getName} is injectable.")
    }
  }
} 
Example 136
Source File: AuthMacroTest.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime

import akka.stream.Materializer
import org.coursera.naptime.access.HeaderAccessControl
import org.coursera.naptime.model.KeyFormat
import org.coursera.naptime.resources.TopLevelCollectionResource
import org.coursera.naptime.router2._
import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit
import org.scalatest.mockito.MockitoSugar
import play.api.libs.json.OFormat
import play.api.mvc.RequestHeader

import scala.concurrent.ExecutionContext

case class CustomAuth()

object CustomAuthorizer extends HeaderAccessControl[CustomAuth] {
  override def run(requestHeader: RequestHeader)(implicit executionContext: ExecutionContext) = ???
  override private[naptime] def check(authInfo: CustomAuth) = ???
}

class AuthorizedResource(
    implicit val executionContext: ExecutionContext,
    val materializer: Materializer)
    extends TopLevelCollectionResource[String, Item] {

  override def keyFormat: KeyFormat[String] = KeyFormat.stringKeyFormat

  override implicit def resourceFormat: OFormat[Item] = Item.jsonFormat

  override def resourceName: String = "items"

  implicit val fields = Fields.withDefaultFields("name")

  def get(id: String) =
    Nap
      .auth(CustomAuthorizer)
      .get { ctx =>
        ???
      }

}

object AuthorizedResource {
  val routerBuilder = Router.build[AuthorizedResource]
}

class AuthMacroTest extends AssertionsForJUnit with MockitoSugar with ResourceTestImplicits {

  val schema = AuthorizedResource.routerBuilder.schema

  @Test
  def get(): Unit = {
    val handler = schema.handlers.find(_.name === "get").get
    assert(handler.authType === Some("org.coursera.naptime.CustomAuth"))
  }

} 
Example 137
Source File: QueryComplexityFilterTest.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime.ari.graphql.controllers.filters

import org.coursera.naptime.ResourceTestImplicits
import org.junit.Test

class QueryComplexityFilterTest extends FilterTest with ResourceTestImplicits {

  val config = ComplexityFilterConfiguration.DEFAULT
  val filter = new QueryComplexityFilter(graphqlSchemaProvider, config)

  @Test
  def emptyQuery(): Unit = {
    val incomingQuery = generateIncomingQuery()
    val outgoingQuery = run(incomingQuery).futureValue
    assert(outgoingQuery === baseOutgoingQuery)
  }

  @Test
  def complexQuery(): Unit = {
    val query =
      """
        |query {
        |  CoursesV1Resource {
        |    getAll(limit: 100000) {
        |      elements {
        |        id
        |      }
        |    }
        |  }
        |}
      """.stripMargin
    val incomingQuery = generateIncomingQuery(query)
    val outgoingQuery = ensureNotPropagated(incomingQuery).futureValue

    assert(outgoingQuery.response.value("error").as[String] === "Query is too complex.")
    assert(outgoingQuery.response.value("complexity").as[Int] === 200001)
  }
} 
Example 138
Source File: NaptimeEnumFieldTest.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime.ari.graphql.schema

import com.linkedin.data.schema.EnumDataSchema
import com.linkedin.data.schema.Name
import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit
import org.scalatest.mockito.MockitoSugar
import sangria.schema.EnumType

import scala.collection.JavaConverters._

class NaptimeEnumFieldTest extends AssertionsForJUnit with MockitoSugar {

  def buildEnumDataSchema(values: List[String]): EnumDataSchema = {
    val enum = new EnumDataSchema(new Name("testEnum"))
    val stringBuilder = new java.lang.StringBuilder()
    enum.setSymbols(values.asJava, stringBuilder)
    enum
  }

  @Test
  def build_RegularEnum(): Unit = {
    val values = List("valueOne", "valueTwo")
    val enum = buildEnumDataSchema(values)
    val field = NaptimeEnumField.build(enum, "myField")
    assert(field.fieldType.asInstanceOf[EnumType[String]].values.map(_.name) === values)
  }

  @Test
  def build_EmptyEnum(): Unit = {
    val values = List()
    val expectedValues = List("UNKNOWN")
    val enum = buildEnumDataSchema(values)
    val field = NaptimeEnumField.build(enum, "myField")
    assert(field.fieldType.asInstanceOf[EnumType[String]].values.map(_.name) === expectedValues)
  }

} 
Example 139
Source File: NaptimePaginatedResourceFieldTest.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime.ari.graphql.schema

import org.coursera.naptime.ResourceName
import org.coursera.naptime.ari.graphql.Models
import org.coursera.naptime.ari.graphql.SangriaGraphQlContext
import org.coursera.naptime.ari.graphql.helpers.ArgumentBuilder
import org.junit.Test
import org.mockito.Mockito.when
import org.scalatest.junit.AssertionsForJUnit
import org.scalatest.mockito.MockitoSugar

import scala.concurrent.ExecutionContext

class NaptimePaginatedResourceFieldTest extends AssertionsForJUnit with MockitoSugar {

  val fieldName = "relatedIds"
  val resourceName = ResourceName("courses", 1)
  val context = SangriaGraphQlContext(null, null, ExecutionContext.global, debugMode = false)

  private[this] val schemaMetadata = mock[SchemaMetadata]
  private[this] val resource = Models.courseResource
  when(schemaMetadata.getResourceOpt(resourceName)).thenReturn(Some(resource))
  when(schemaMetadata.getSchema(resource)).thenReturn(Some(null))

  @Test
  def computeComplexity(): Unit = {
    val field = NaptimePaginatedResourceField.build(
      schemaMetadata,
      resourceName,
      fieldName,
      None,
      None,
      List.empty)

    val argDefinitions = NaptimePaginationField.paginationArguments

    val limitTen = field.right.get.complexity.get
      .apply(context, ArgumentBuilder.buildArgs(argDefinitions, Map("limit" -> Some(10))), 1)
    assert(limitTen === 1 * NaptimePaginatedResourceField.COMPLEXITY_COST * 1)

    val limitFifty = field.right.get.complexity.get
      .apply(context, ArgumentBuilder.buildArgs(argDefinitions, Map("limit" -> Some(50))), 1)
    assert(limitFifty === 5 * NaptimePaginatedResourceField.COMPLEXITY_COST * 1)

    val limitZero = field.right.get.complexity.get
      .apply(context, ArgumentBuilder.buildArgs(argDefinitions, Map("limit" -> Some(1))), 1)
    assert(limitZero === 1 * NaptimePaginatedResourceField.COMPLEXITY_COST * 1)

    val childScoreFive = field.right.get.complexity.get
      .apply(context, ArgumentBuilder.buildArgs(argDefinitions, Map("limit" -> Some(1))), 5)
    assert(childScoreFive === 1 * NaptimePaginatedResourceField.COMPLEXITY_COST * 5)

  }

} 
Example 140
Source File: TypedDefinitionsTest.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime.courier

import org.coursera.naptime.courier.TestTypedDefinition.TestTypedDefinitionAlphaMember
import org.coursera.naptime.courier.TestTypedDefinition.TestTypedDefinitionBetaMember
import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit

class TypedDefinitionsTest extends AssertionsForJUnit {
  @Test
  def typeNameForInstance(): Unit = {
    val alphaMember = TestTypedDefinition.TestTypedDefinitionAlphaMember(TestTypedDefinitionAlpha())
    assertResult("alpha")(TypedDefinitions.typeName(alphaMember))

    val betaMember = TestTypedDefinition.TestTypedDefinitionBetaMember(TestTypedDefinitionBeta())
    assertResult("beta")(TypedDefinitions.typeName(betaMember))
  }

  @Test
  def typeNameForClass(): Unit = {
    assertResult("alpha")(TypedDefinitions.typeName(TestTypedDefinitionAlphaMember))
    assertResult("beta")(TypedDefinitions.typeName(TestTypedDefinitionBetaMember))
  }

} 
Example 141
Source File: CourierSerializerTest.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime.courier

import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit

class CourierSerializerTest extends AssertionsForJUnit {
  import CourierTestFixtures._

  @Test
  def testRecordTemplates(): Unit = {
    val mock = CourierSerializer.read[TypedDefinitionRecord](typedDefinitionJson)
    val roundTripped = CourierSerializer.write(mock)
    assert(
      CourierSerializer.read[TypedDefinitionRecord](roundTripped) ===
        CourierSerializer.read[TypedDefinitionRecord](typedDefinitionJson))
  }

  @Test
  def testUnionTemplates(): Unit = {
    val mock = CourierSerializer.readUnion[MockTyperefUnion](mockTyperefUnionJson)
    val roundTripped = CourierSerializer.writeUnion(mock)
    assert(
      CourierSerializer.readUnion[MockTyperefUnion](roundTripped) ===
        CourierSerializer.readUnion[MockTyperefUnion](mockTyperefUnionJson))
  }
} 
Example 142
Source File: KeyFormatTest.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime.model

import org.coursera.common.jsonformat.JsonFormats
import org.coursera.naptime.model.KeyFormatTest.MembershipId
import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit
import play.api.libs.json.JsString
import play.api.libs.json.JsSuccess
import play.api.libs.json.Json
import play.api.libs.json.Reads
import org.coursera.common.jsonformat.OrFormats.OrReads
import org.coursera.common.stringkey.StringKeyFormat

object KeyFormatTest {
  case class MembershipId(userId: Long, courseId: String)
  object MembershipId {

    implicit val stringKeyFormat: StringKeyFormat[MembershipId] = {
      StringKeyFormat.caseClassFormat((apply _).tupled, unapply)
    }

    val reads: Reads[MembershipId] =
      Json.reads[MembershipId].orReads(JsonFormats.stringKeyFormat[MembershipId])

    val keyFormat =
      KeyFormat.withFallbackReads(reads)(KeyFormat.idAsStringWithFields(Json.format[MembershipId]))
  }
}

class KeyFormatTest extends AssertionsForJUnit {

  @Test
  def testWithComplexReads(): Unit = {
    val oldSerialization = Json.obj("userId" -> 12345L, "courseId" -> "machine-learning")

    val newSerialization = JsString("12345~machine-learning")

    val expected = JsSuccess(MembershipId(12345L, "machine-learning"))

    assert(expected === MembershipId.keyFormat.reads(oldSerialization))
    assert(expected === MembershipId.keyFormat.reads(newSerialization))
  }
} 
Example 143
Source File: NestingTests.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime.resources

import akka.stream.Materializer
import org.coursera.common.jsonformat.JsonFormats.Implicits.dateTimeFormat
import org.coursera.naptime.ResourceTestImplicits
import org.coursera.naptime.model.KeyFormat
import org.coursera.naptime.path.ParseFailure
import org.coursera.naptime.path.ParseSuccess
import org.coursera.naptime.path.RootParsedPathKey
import org.coursera.naptime.resources.NestingTests.FriendInfoResource
import org.coursera.naptime.resources.NestingTests.PeopleResource
import org.joda.time.DateTime
import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit
import play.api.libs.json.Json
import play.api.libs.json.OFormat

import scala.concurrent.ExecutionContext

object NestingTests {
  case class Person(name: String)
  object Person {
    implicit val jsonFormat: OFormat[Person] = Json.format[Person]
  }

  class PeopleResource(
      implicit val executionContext: ExecutionContext,
      val materializer: Materializer)
      extends TopLevelCollectionResource[String, Person] {

    override def keyFormat = KeyFormat.stringKeyFormat
    override implicit def resourceFormat = implicitly
    override def resourceName: String = "people"
  }

  case class FriendInfo(since: DateTime, important: Boolean)
  object FriendInfo {
    implicit val jsonFormat: OFormat[FriendInfo] = Json.format[FriendInfo]
  }

  class FriendInfoResource(peopleResource: PeopleResource)(
      implicit val executionContext: ExecutionContext,
      val materializer: Materializer)
      extends CollectionResource[PeopleResource, String, FriendInfo] {

    override def keyFormat = KeyFormat.stringKeyFormat
    override val parentResource = peopleResource
    override implicit def resourceFormat = implicitly
    override def resourceName: String = "friendInfo"
  }
}

class NestingTests extends AssertionsForJUnit with ResourceTestImplicits {

  @Test
  def topLevelRouting(): Unit = {
    val peopleResource = new PeopleResource
    assert(
      ParseSuccess(None, "asdf" ::: RootParsedPathKey) ===
        peopleResource.pathParser.parse("/people.v1/asdf"))
    assert(
      ParseSuccess(Some("/friendInfo.v1/fdsa"), "asdf" ::: RootParsedPathKey) ===
        peopleResource.pathParser.parse("/people.v1/asdf/friendInfo.v1/fdsa"))
    assert(ParseFailure === peopleResource.pathParser.parse("/friendInfo.v1/asdf"))
  }

  @Test
  def nestedRouting(): Unit = {
    val peopleResource = new PeopleResource
    val friendInfoResource = new FriendInfoResource(peopleResource)
    assert(
      ParseSuccess(None, "fdsa" ::: "asdf" ::: RootParsedPathKey) ===
        friendInfoResource.pathParser.parse("/people.v1/asdf/friendInfo.v1/fdsa"))
    assert(ParseFailure === friendInfoResource.pathParser.parse("/friendInfo.v1/fdsa"))
    assert(ParseFailure === friendInfoResource.pathParser.parse("/people.v1/asdf"))
  }
} 
Example 144
Source File: RestContextTest.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime

import org.junit.Test
import play.api.i18n.Lang
import play.api.mvc.Request
import org.mockito.Mockito.when
import org.scalatest.junit.AssertionsForJUnit
import org.scalatest.mockito.MockitoSugar

class RestContextTest extends AssertionsForJUnit with MockitoSugar {

  private[this] def makeContext(languagePreferences: Seq[Lang]): RestContext[Unit, Unit] = {
    val mockRequest = mock[Request[Unit]]
    val restContext = new RestContext((), (), mockRequest, null, null, null)
    when(mockRequest.acceptLanguages).thenReturn(languagePreferences)
    restContext
  }

  def test(
      requestLanguages: Seq[Lang],
      availableLanguages: Set[Lang],
      defaultLanguage: Lang,
      expected: Lang): Unit = {
    val restContext = makeContext(requestLanguages)
    assert(restContext.selectLanguage(availableLanguages, defaultLanguage) === expected)
  }

  @Test
  def basicLanguage(): Unit = {
    test(
      requestLanguages = Seq(Lang("en")),
      availableLanguages = Set(Lang("fr"), Lang("en")),
      defaultLanguage = Lang("en"),
      expected = Lang("en"))
  }

  @Test
  def defaultFallback(): Unit = {
    test(
      requestLanguages = Seq(Lang("zh")),
      availableLanguages = Set(Lang("fr"), Lang("en")),
      defaultLanguage = Lang("en"),
      expected = Lang("en"))
  }

  @Test
  def choosePreferred(): Unit = {
    test(
      requestLanguages = Seq(Lang("zh"), Lang("fr"), Lang("en")),
      availableLanguages = Set(Lang("fr"), Lang("en")),
      defaultLanguage = Lang("en"),
      expected = Lang("fr"))
  }
} 
Example 145
Source File: ETagTest.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime

import org.coursera.common.stringkey.StringKey
import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit

class ETagTest extends AssertionsForJUnit {

  @Test
  def weakSerialization(): Unit = {
    assertResult("W/\"abc\"")(StringKey.toStringKey(ETag.Weak("abc")).key)
    assertResult("W/\"abc\"")(StringKey.toStringKey(ETag.Weak("abc")).key)
  }

  @Test
  def weakDeserialization(): Unit = {
    val stringKey = StringKey("W/\"abc\"")
    assertResult(stringKey.asOpt[ETag.Weak])(Some(ETag.Weak("abc")))
    assertResult(stringKey.asOpt[ETag])(Some(ETag.Weak("abc")))
    assertResult(stringKey.asOpt[ETag.Strong])(None)
  }

  @Test
  def strongSerialization(): Unit = {
    assertResult("\"abc\"")(StringKey.toStringKey(ETag.Strong("abc")).key)
    assertResult("\"abc\"")(StringKey.toStringKey(ETag.Strong("abc")).key)
  }

  @Test
  def strongDeserialization(): Unit = {
    val stringKey = StringKey("\"abc\"")
    assertResult(stringKey.asOpt[ETag.Strong])(Some(ETag.Strong("abc")))
    assertResult(stringKey.asOpt[ETag])(Some(ETag.Strong("abc")))
    assertResult(stringKey.asOpt[ETag.Weak])(None)
  }

} 
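A short follow-up sketch that combines the serialization and deserialization checks above into a single round trip. It uses the same imports as ETagTest and relies only on the StringKey calls exercised in this example:

class ETagRoundTripSketch extends AssertionsForJUnit {

  @Test
  def strongRoundTrip(): Unit = {
    // Serialize a strong ETag and read it back through the generic ETag format;
    // per the assertions above this should recover the original value.
    val serialized = StringKey.toStringKey(ETag.Strong("abc"))
    assertResult(Some(ETag.Strong("abc")))(serialized.asOpt[ETag])
  }
}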
Example 146
Source File: FlattenedFilteringJacksonDataCodecTest.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime.actions

import org.coursera.naptime.QueryFields
import org.coursera.naptime.ResourceName
import org.coursera.naptime.actions.RestActionCategoryEngine2.FlattenedFilteringJacksonDataCodec
import org.junit.Ignore
import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit
import play.api.libs.json.Json

class FlattenedFilteringJacksonDataCodecTest extends AssertionsForJUnit {

  @Test
  def testElementsFiltering(): Unit = {
    val unfiltered = Json.obj(
      "elements" -> Json.arr(
        Json.obj("a" -> "1", "b" -> "1", "c" -> "1"),
        Json.obj("a" -> "2", "b" -> "2", "c" -> "2"),
        Json.obj("a" -> "3", "b" -> "3", "c" -> "3")))

    val dataMap = NaptimeSerializer.PlayJson.serialize(unfiltered)

    val expected = Json.obj(
      "elements" -> Json.arr(Json.obj("a" -> "1"), Json.obj("a" -> "2"), Json.obj("a" -> "3")))

    val fields = QueryFields(Set("a"), Map.empty)

    val codec = new FlattenedFilteringJacksonDataCodec(fields)
    val serialized = codec.mapToString(dataMap)
    val deserialized = codec.stringToMap(serialized)

    assert(
      NaptimeSerializer.PlayJson.serialize(expected) === deserialized,
      s"Serialized is: $serialized")
    assert(expected === Json.parse(serialized))
  }

  @Test
  def testRelatedFiltering(): Unit = {
    val unfiltered = Json.obj(
      "elements" -> Json.arr(
        Json.obj("a" -> 1, "b" -> 1, "c" -> 1),
        Json.obj("a" -> 2, "b" -> 2, "c" -> 2),
        Json.obj("a" -> 3, "b" -> 3, "c" -> 3)),
      "linked" -> Json.obj(
        "foo.v1" -> Json
          .arr(Json.obj("x" -> 1, "y" -> 1, "z" -> 1), Json.obj("x" -> 2, "y" -> 2, "z" -> 2)),
        "bar.v2/sub/supersub" -> Json
          .arr(Json.obj("p" -> 1, "q" -> 1, "r" -> 1), Json.obj("p" -> 2, "q" -> 2, "r" -> 2)),
        "unrelated.v3" -> Json.arr(Json.obj("w" -> "oops"))))

    val expected = Json.obj(
      "elements" -> Json.arr(Json.obj("a" -> 1), Json.obj("a" -> 2), Json.obj("a" -> 3)),
      "linked" -> Json.obj(
        "foo.v1" -> Json.arr(Json.obj("x" -> 1), Json.obj("x" -> 2)),
        "bar.v2/sub/supersub" -> Json.arr(Json.obj("p" -> 1), Json.obj("p" -> 2))))

    val fields = QueryFields(
      Set("a"),
      Map(
        ResourceName("foo", 1) -> Set("x"),
        ResourceName("bar", 2, List("sub", "supersub")) -> Set("p")))

    val dataMap = NaptimeSerializer.PlayJson.serialize(unfiltered)
    val codec = new FlattenedFilteringJacksonDataCodec(fields)
    val serialized = codec.mapToString(dataMap)
    val deserialized = codec.stringToMap(serialized)
    assert(
      NaptimeSerializer.PlayJson.serialize(expected) === deserialized,
      s"Serialized is: $serialized")
    assert(expected === Json.parse(serialized))
  }

  @Ignore
  @Test
  def testEmptyTopLevels(): Unit = {
    val unfiltered = Json.obj(
      "elements" -> Json.arr(Json.obj("a" -> 1), Json.obj("a" -> 2)),
      "paging" -> Json.obj(),
      "linked" -> Json.obj())

    val expected = Json.obj("elements" -> Json.arr(Json.obj("a" -> 1), Json.obj("a" -> 2)))

    val fields = QueryFields(Set("a"), Map.empty)
    val dataMap = NaptimeSerializer.PlayJson.serialize(unfiltered)
    val codec = new FlattenedFilteringJacksonDataCodec(fields)
    val serialized = codec.mapToString(dataMap)
    val deserialized = codec.stringToMap(serialized)
    assert(NaptimeSerializer.PlayJson.serialize(expected) === deserialized)
    assert(expected === Json.parse(serialized))
  }
} 
Example 147
Source File: RecordTemplateNaptimeSerializerTest.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime.actions

import com.linkedin.data.DataList
import com.linkedin.data.DataMap
import org.coursera.naptime.schema.Parameter
import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit

class RecordTemplateNaptimeSerializerTest extends AssertionsForJUnit {

  private[this] def helper[A](obj: A)(implicit naptimeSerializer: NaptimeSerializer[A]): DataMap = {
    naptimeSerializer.serialize(obj)
  }

  @Test
  def simpleTest(): Unit = {
    val parameter = Parameter("parameterName", "fakeType", None, List.empty)

    val expected = new DataMap()
    expected.put("name", "parameterName")
    expected.put("type", "fakeType")
    expected.put("attributes", new DataList())
    expected.put("required", Boolean.box(false))

    assert(expected === helper(parameter))
  }

} 
Example 148
Source File: DataMapUtilsTests.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime.actions.util

import com.linkedin.data.DataList
import com.linkedin.data.DataMap
import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit

class DataMapUtilsTests extends AssertionsForJUnit {

  @Test
  def mutableMapUnchanged(): Unit = {
    val myMap = new DataMap()
    assert(myMap == DataMapUtils.ensureMutable(myMap))
  }

  @Test
  def mutableNonEmptyMapUnchanged(): Unit = {
    val myMap = new DataMap()
    myMap.put("1", new Integer(1))
    myMap.put("2", new Integer(2))
    assert(myMap == DataMapUtils.ensureMutable(myMap))
  }

  @Test
  def nonComplexUnchanged(): Unit = {
    val myString = "myString"
    assert(myString eq DataMapUtils.ensureMutable(myString))

    val myInt = new Integer(2)
    assert(myInt eq DataMapUtils.ensureMutable(myInt))

  }

  @Test
  def mutableListUnchanged(): Unit = {
    val myList = new DataList()
    assert(myList eq DataMapUtils.ensureMutable(myList))
  }

  @Test
  def mutableNonEmptyListUnchanged(): Unit = {
    val myList = new DataList()
    myList.add("1")
    myList.add(new Integer(2))
    assert(myList eq DataMapUtils.ensureMutable(myList))
  }

  @Test
  def ensureImmutableListIsMutable(): Unit = {
    val myList = new DataList()
    myList.setReadOnly()
    val mutableList = DataMapUtils.ensureMutable(myList)
    assert(!mutableList.asInstanceOf[DataList].isMadeReadOnly)
  }

  @Test
  def ensureNestedImmutableIsMadeMutable(): Unit = {
    val immutable = new DataMap()
    immutable.put("1", "one")
    immutable.setReadOnly()

    val wrappingList = new DataList()
    wrappingList.add(immutable)

    val wrappingMap = new DataMap()
    wrappingMap.put("list", wrappingList)

    val fullyMutable = DataMapUtils.ensureMutable(wrappingMap)
    assert(fullyMutable.isInstanceOf[DataMap])
    assert(!fullyMutable.asInstanceOf[DataMap].isMadeReadOnly)
    assert(wrappingMap == fullyMutable)

    val mutableList = fullyMutable.asInstanceOf[DataMap].get("list")
    assert(mutableList.isInstanceOf[DataList])
    assert(!mutableList.asInstanceOf[DataList].isMadeReadOnly)
    assert(mutableList == wrappingList)
    assert(mutableList eq wrappingList)

    val innerMap = mutableList.asInstanceOf[DataList].get(0)
    assert(innerMap.isInstanceOf[DataMap])
    assert(innerMap == immutable)
    assert(!innerMap.asInstanceOf[DataMap].isMadeReadOnly)
    assert(innerMap.asInstanceOf[DataMap].get("1") == "one")
  }
} 
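A hedged map-typed analogue of the read-only list case above. It uses only DataMap calls already exercised in this example (setReadOnly, isMadeReadOnly) and assumes the same imports as DataMapUtilsTests:

class DataMapUtilsMapSketch extends AssertionsForJUnit {

  @Test
  def ensureImmutableMapIsMutable(): Unit = {
    // A read-only DataMap is assumed to come back from ensureMutable as a writable copy,
    // mirroring ensureImmutableListIsMutable above.
    val myMap = new DataMap()
    myMap.setReadOnly()
    val mutableMap = DataMapUtils.ensureMutable(myMap)
    assert(!mutableMap.asInstanceOf[DataMap].isMadeReadOnly)
  }
}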
Example 149
Source File: DecoratorTest.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime.access.authenticator

import org.coursera.naptime.ResourceTestImplicits
import org.junit.Test
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.junit.AssertionsForJUnit

import scala.concurrent.Future

class DecoratorTest extends AssertionsForJUnit with ScalaFutures with ResourceTestImplicits {

  @Test
  def identity(): Unit = {
    assert(Right("a") === Decorator.identity("a").futureValue)
  }

  @Test
  def simple(): Unit = {
    val decorator = Decorator.function[String, Int] { a =>
      Future.successful(Right(a.length))
    }
    assert(Right(3) === decorator("abc").futureValue)
  }

  @Test
  def composition(): Unit = {
    val decorator1 = Decorator.function[String, Int](a => Future.successful(Right(a.length)))
    val decorator2 = Decorator.function[Int, Int](b => Future.successful(Right(b + 2)))
    assert(Right(5) === decorator1.andThen(decorator2)("abc").futureValue)
  }

  @Test
  def compositionFailingFirst(): Unit = {
    val decorator1 =
      Decorator.function[String, Int](a => Future.failed(new RuntimeException("err")))
    val decorator2 = Decorator.function[Int, Int](i => Future.successful(Right(i + 3)))
    val result = decorator1.andThen(decorator2)("abc")
    try {
      result.futureValue
      fail("Should have thrown an exception!")
    } catch {
      case e: RuntimeException =>
      // pass
    }
  }

  @Test
  def compositionFailingFirstLeft(): Unit = {
    var failTest = false
    val decorator1 = Decorator.function[String, Int](a => Future.successful(Left("err")))
    val decorator2 = Decorator.function[Int, Int] { i =>
      failTest = true // We shouldn't be called.
      Future.successful(Right(i + 3))
    }
    val result = decorator1.andThen(decorator2)("abc")
    assert(Left("err") === result.futureValue)
    assert(!failTest)

  }

  @Test
  def compositionFailingSecond(): Unit = {
    val decorator1 = Decorator.function[String, Int](a => Future.successful(Right(a.length)))
    val decorator2 = Decorator.function[Int, Int](_ => Future.successful(Left("error")))
    val result = decorator1.andThen(decorator2)("abc")
    assert(Left("error") === result.futureValue)
  }

  @Test
  def map(): Unit = {
    val decorator = Decorator.identity[String].map(x => x.length)
    assert(Right(3) === decorator("abc").futureValue)
  }

  @Test
  def flatMap(): Unit = {
    val decorator = Decorator.identity[String].flatMap(x => Right(x.length))
    assert(Right(3) === decorator("abc").futureValue)
  }

  @Test
  def flatMapLeft(): Unit = {
    val decorator = Decorator.identity[String].flatMap(x => Left("error!"))
    assert(Left("error!") === decorator("abc").futureValue)
  }
} 
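The compositionFailingFirst test above wraps futureValue in try/catch. A sketch of the same check as an extra method inside DecoratorTest, using ScalaTest's intercept (available through AssertionsForJUnit); it assumes, as the test above demonstrates, that futureValue surfaces a failed future as a thrown exception:

  @Test
  def compositionFailingFirstWithIntercept(): Unit = {
    val decorator1 =
      Decorator.function[String, Int](_ => Future.failed(new RuntimeException("err")))
    val decorator2 = Decorator.function[Int, Int](i => Future.successful(Right(i + 3)))
    // futureValue throws on a failed future, so intercept captures the failure.
    intercept[Exception] {
      decorator1.andThen(decorator2)("abc").futureValue
    }
  }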
Example 150
Source File: ResourceNameTest.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime

import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit

class ResourceNameTest extends AssertionsForJUnit {

  @Test
  def simpleResourceNameIdentifier(): Unit = {
    assert(ResourceName("foo", 1).identifier === "foo.v1")
  }

  @Test
  def nestedResourceNameIdentifier(): Unit = {
    assert(ResourceName("fooBar", 1, List("history")).identifier === "fooBar.v1/history")
  }

  @Test
  def deeplyNestedResourceNameIdenfier(): Unit = {
    assert(
      ResourceName("fooBarBaz", 103, List("history", "author")).identifier ===
        "fooBarBaz.v103/history/author")
  }

  @Test
  def parseSimple(): Unit = {
    assert(ResourceName("foo", 1) === ResourceName.parse("foo.v1").get)
  }

  @Test
  def parseNested(): Unit = {
    assert(ResourceName("fooBar", 2, List("sub")) === ResourceName.parse("fooBar.v2/sub").get)
  }

  @Test
  def parseDeeplyNested(): Unit = {
    assert(
      ResourceName("fooBar", 3, List("sub", "superSub")) ===
        ResourceName.parse("fooBar.v3/sub/superSub").get)
  }
} 
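The identifier and parse tests above are mutually inverse. A brief sketch of the round trip, written as an extra method for ResourceNameTest and relying only on the API used above:

  @Test
  def parseIsInverseOfIdentifier(): Unit = {
    // identifier yields "fooBar.v2/sub" and parse recovers the same name,
    // per the nestedResourceNameIdentifier and parseNested tests above.
    val name = ResourceName("fooBar", 2, List("sub"))
    assert(name === ResourceName.parse(name.identifier).get)
  }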
Example 151
Source File: TypesTest.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime

import com.linkedin.data.schema.IntegerDataSchema
import com.linkedin.data.schema.StringDataSchema
import org.coursera.naptime.actions.Course
import org.coursera.naptime.actions.EnrollmentId
import org.coursera.naptime.actions.SessionId
import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit

class TypesTest extends AssertionsForJUnit {

  @Test
  def primitiveSchema(): Unit = {
    val resultingType = Types.computeAsymType(
      "org.coursera.naptime.TestResource.Model",
      new IntegerDataSchema,
      Course.SCHEMA,
      ResourceFields.FAKE_FIELDS)

    assert(!resultingType.isErrorRecord)
    assert(resultingType.getFields().size() == 3)
    assert(resultingType.getField("id") != null)
    assert(resultingType.getField("id").getRecord == resultingType)
    assert(resultingType.getField("id").getType == new IntegerDataSchema)
    assert(resultingType.getField("name") != null)
    assert(resultingType.getField("name").getRecord == Course.SCHEMA)
    assert(resultingType.getField("description") != null)
    assert(resultingType.getField("description").getRecord == Course.SCHEMA)
  }

  @Test
  def complexSchema(): Unit = {
    val resultingType = Types.computeAsymType(
      "org.coursera.naptime.ComplexTestResource.Model",
      EnrollmentId.SCHEMA,
      Course.SCHEMA,
      ResourceFields.FAKE_FIELDS)

    assert(!resultingType.isErrorRecord)
    assert(resultingType.getFields().size() == 5)
    assert(resultingType.getField("id") != null)
    assert(resultingType.getField("id").getRecord == resultingType)
    assert(resultingType.getField("id").getType == new StringDataSchema)
    assert(resultingType.getField("userId") != null)
    assert(resultingType.getField("userId").getRecord == EnrollmentId.SCHEMA)
    assert(resultingType.getField("userId").getType == new IntegerDataSchema)
    assert(resultingType.getField("courseId") != null)
    assert(resultingType.getField("courseId").getRecord == EnrollmentId.SCHEMA)
    assert(resultingType.getField("courseId").getType == SessionId.SCHEMA)
    assert(resultingType.getField("name") != null)
    assert(resultingType.getField("name").getRecord == Course.SCHEMA)
    assert(resultingType.getField("description") != null)
    assert(resultingType.getField("description").getRecord == Course.SCHEMA)
  }

  @Test
  def idWithIdField(): Unit = {
    val resultingType = Types.computeAsymType(
      "org.coursera.naptime.IdWithIdTestResource.Model",
      IdWithIdField.SCHEMA,
      Course.SCHEMA,
      ResourceFields.FAKE_FIELDS)

    assert(!resultingType.isErrorRecord)
    assert(resultingType.getFields().size() == 4)
    assert(resultingType.getField("id") != null)
    assert(resultingType.getField("id").getRecord == IdWithIdField.SCHEMA)
    assert(resultingType.getField("id").getType == new IntegerDataSchema)
    assert(resultingType.getField("alias") != null)
    assert(resultingType.getField("alias").getRecord == IdWithIdField.SCHEMA)
    assert(resultingType.getField("alias").getType == new StringDataSchema)
    assert(resultingType.getField("name") != null)
    assert(resultingType.getField("name").getRecord == Course.SCHEMA)
    assert(resultingType.getField("description") != null)
    assert(resultingType.getField("description").getRecord == Course.SCHEMA)
  }
} 
Example 152
Source File: ParsedPathKeyTest.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime.path

import org.coursera.naptime.path.ParsedPathKeyTest.User
import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit

object ParsedPathKeyTest {
  case class User(name: String, email: String)
}

class ParsedPathKeyTest extends AssertionsForJUnit {

  @Test
  def testEvidence(): Unit = {
    val pathKey = "a" ::: 1 ::: User("Daphne", "[email protected]") ::: RootParsedPathKey
    assert(pathKey.key === "a")
    assert(pathKey.parentKey === 1)
    assert(pathKey.grandparentKey === User("Daphne", "[email protected]"))
  }

  @Test
  def testUnapply(): Unit = {
    val a ::: b ::: RootParsedPathKey = "a" ::: 1 ::: RootParsedPathKey
    assert(a === "a")
    assert(b === 1)
  }
} 
Example 153
Source File: CollectionResourcePathParserTest.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime.path

import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit

class CollectionResourcePathParserTest extends AssertionsForJUnit {

  @Test
  def testSimpleFinalLevelParsing(): Unit = {
    val path = CollectionResourcePathParser[String]("foo", 1)
    assert(ParseSuccess(None, Some("hello")) === path.parseOptUrl("/foo.v1/hello"))
    assert(ParseSuccess(None, None) === path.parseOptUrl("/foo.v1/"))
    assert(ParseSuccess(None, None) === path.parseOptUrl("/foo.v1"))
    assert(ParseFailure === path.parseOptUrl("/foo.v3"))
  }

  @Test
  def testSimpleParsing(): Unit = {
    val path = CollectionResourcePathParser[String]("foo", 1)
    assert(ParseSuccess(None, "hello") === path.parseUrl("/foo.v1/hello"))
    assert(ParseSuccess(Some("/bar.v2"), "world") === path.parseUrl("/foo.v1/world/bar.v2"))
    assert(
      ParseSuccess(Some("/bar.v2/123/baz.v3/321"), "goodbye") ===
        path.parseUrl("/foo.v1/goodbye/bar.v2/123/baz.v3/321"))
    assert(ParseFailure === path.parseUrl("/bar.v1/hello"))
  }

  @Test
  def testTypeConversion(): Unit = {
    val path = CollectionResourcePathParser[Int]("bar", 3)
    assert(ParseSuccess(None, 42) === path.parseUrl("/bar.v3/42"))
    assert(ParseSuccess(Some("/baz.v2"), 111) === path.parseUrl("/bar.v3/111/baz.v2"))
    assert(ParseFailure === path.parseUrl("/baz.v2/hello"))
  }
} 
Example 154
Source File: RouterTest.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime.router2

import com.google.inject.Injector
import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit
import org.scalatest.mockito.MockitoSugar
import org.mockito.Mockito._
import org.mockito.Matchers._
import play.api.test.FakeRequest

object RouterTest {
  class Resource1
  class Resource2
  class Resource3

  abstract class ResourceRouterBuilder1 extends ResourceRouterBuilder {
    type ResourceType = Resource1
  }
  abstract class ResourceRouterBuilder2 extends ResourceRouterBuilder {
    type ResourceType = Resource2
  }
  abstract class ResourceRouterBuilder3 extends ResourceRouterBuilder {
    type ResourceType = Resource3
  }

  abstract class ResourceRouter1 extends ResourceRouter {
    type ResourceType = Resource1
  }
  abstract class ResourceRouter2 extends ResourceRouter {
    type ResourceType = Resource2
  }
  abstract class ResourceRouter3 extends ResourceRouter {
    type ResourceType = Resource3
  }
}

class RouterTest extends AssertionsForJUnit with MockitoSugar {
  import RouterTest._

  val resourceRouterBuilder1 = mock[ResourceRouterBuilder1]
  val resourceRouterBuilder2 = mock[ResourceRouterBuilder2]
  val resourceRouterBuilder3 = mock[ResourceRouterBuilder3]
  val resourceRouter1 = mock[ResourceRouter1]
  val resourceRouter2 = mock[ResourceRouter2]
  val resourceRouter3 = mock[ResourceRouter3]
  val resources = List(resourceRouterBuilder1, resourceRouterBuilder2, resourceRouterBuilder3)
  val injector = mock[Injector]
  setupStandardMocks()
  val router = new Router(injector, resources)

  private[this] def setupStandardMocks(): Unit = {
    when(resourceRouterBuilder1.build(any())).thenReturn(resourceRouter1)
    when(resourceRouterBuilder2.build(any())).thenReturn(resourceRouter2)
    when(resourceRouterBuilder3.build(any())).thenReturn(resourceRouter3)
  }
  val fakeRequest = FakeRequest("GET", "/api/foo.v1")

  @Test
  def simpleRouting(): Unit = {
    when(resourceRouter1.routeRequest(any(), any())).thenReturn(Some(null))
    val result = router.onRouteRequest(fakeRequest)
    assert(result.isDefined, "Expected result to be defined.")
    verifyZeroInteractions(resourceRouter2, resourceRouter3)
  }

  @Test
  def stopImmediately(): Unit = {
    when(resourceRouter1.routeRequest(any(), any())).thenReturn(None)
    when(resourceRouter2.routeRequest(any(), any())).thenReturn(Some(null))
    val result = router.onRouteRequest(fakeRequest)
    assert(result.isDefined, "Expected result to be defined.")
    verifyZeroInteractions(resourceRouter3)
  }

  @Test
  def handleNoMatchingRequests(): Unit = {
    when(resourceRouter1.routeRequest(any(), any())).thenReturn(None)
    when(resourceRouter2.routeRequest(any(), any())).thenReturn(None)
    when(resourceRouter3.routeRequest(any(), any())).thenReturn(None)
    val result = router.onRouteRequest(fakeRequest)
    assert(result.isEmpty, "Expected result to be empty.")
  }
} 
Example 155
Source File: NaptimePlayRouterTest.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime.router2

import akka.util.ByteString
import com.google.inject.Injector
import org.coursera.naptime.resources.RootResource
import org.coursera.naptime.schema.Handler
import org.coursera.naptime.schema.HandlerKind
import org.coursera.naptime.schema.Parameter
import org.coursera.naptime.schema.Resource
import org.coursera.naptime.schema.ResourceKind
import org.junit.Test
import org.mockito.Mockito.when
import org.mockito.Matchers.any
import org.scalatest.junit.AssertionsForJUnit
import org.scalatest.mockito.MockitoSugar
import play.api.libs.streams.Accumulator
import play.api.mvc.EssentialAction
import play.api.mvc.RequestHeader
import play.api.mvc.RequestTaggingHandler
import play.api.mvc.Result
import play.api.test.FakeRequest

class NaptimePlayRouterTest extends AssertionsForJUnit with MockitoSugar {
  object FakeHandler extends EssentialAction with RequestTaggingHandler {
    override def tagRequest(request: RequestHeader): RequestHeader = request

    override def apply(v1: RequestHeader): Accumulator[ByteString, Result] = ???
  }

  val resourceSchema = Resource(
    kind = ResourceKind.COLLECTION,
    name = "fakeResource",
    version = Some(1L),
    parentClass = Some(classOf[RootResource].getName),
    keyType = "java.lang.String",
    valueType = "FakeModel",
    mergedType = "FakeResourceModel",
    handlers = List(
      Handler(
        kind = HandlerKind.GET,
        name = "get",
        parameters =
          List(Parameter(name = "id", `type` = "String", attributes = List.empty, default = None)),
        inputBodyType = None,
        customOutputBodyType = None,
        attributes = List.empty)),
    className = "org.coursera.naptime.FakeResource",
    attributes = List.empty)

  val resourceRouter = mock[ResourceRouter]
  val resourceRouterBuilder = mock[ResourceRouterBuilder]
  when(resourceRouterBuilder.build(any())).thenReturn(resourceRouter)
  when(resourceRouterBuilder.schema).thenReturn(resourceSchema)

  val injector = mock[Injector]
  val naptimeRoutes = NaptimeRoutes(injector, Set(resourceRouterBuilder))
  val router = new NaptimePlayRouter(naptimeRoutes)

  @Test
  def simpleRouting(): Unit = {
    when(resourceRouter.routeRequest(any(), any())).thenReturn(Some(FakeHandler))
    val handler = router.handlerFor(FakeRequest())
    assert(handler.isDefined)
  }

  @Test
  def simpleRoutingNothing(): Unit = {
    when(resourceRouter.routeRequest(any(), any())).thenReturn(None)
    val handler = router.handlerFor(FakeRequest())
    assert(handler.isEmpty)
  }

  @Test
  def generateDocumentation(): Unit = {
    val documentation = router.documentation
    assert(1 === documentation.length)
    assert(
      (
        "GET --- GET",
        "/fakeResource.v1/$id",
        "[NAPTIME] org.coursera.naptime.FakeResource.get(id: String)") ===
        documentation.head)
  }
} 
Example 156
Source File: QueryParserTests.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime.router2

import org.junit.Test
import org.scalatest.junit.AssertionsForJUnit
import play.api.test.FakeRequest

class QueryParserTests extends AssertionsForJUnit {
  val request = FakeRequest("GET", "/api/myResource.v1?bool1=true&bool2=false&bool3=&bool4=1")

  @Test
  def checkTrueParsing(): Unit = {
    assert(
      CollectionResourceRouter.BooleanFlagParser("bool1", getClass).evaluate(request) ===
        Right(true))
  }

  @Test
  def checkFalseParsing(): Unit = {
    assert(
      CollectionResourceRouter.BooleanFlagParser("bool2", getClass).evaluate(request) ===
        Right(false))
  }

  @Test
  def checkEmptyParsing(): Unit = {
    assert(CollectionResourceRouter.BooleanFlagParser("bool3", getClass).evaluate(request).isLeft)
  }

  @Test
  def checkMalformedParsing(): Unit = {
    assert(CollectionResourceRouter.BooleanFlagParser("bool4", getClass).evaluate(request).isLeft)
  }
} 
Example 157
Source File: TestCompileApplicationInstance.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.tools

import java.io.{OutputStream, OutputStreamWriter}
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import java.nio.file.Files

import com.amazon.milan.application.{Application, ApplicationConfiguration, ApplicationInstance}
import com.amazon.milan.lang._
import com.amazon.milan.testing.applications._
import com.amazon.milan.{Id, SemanticVersion}
import org.junit.Assert._
import org.junit.Test


object TestCompileApplicationInstance {

  case class Record(recordId: String, i: Int)

  class Provider extends ApplicationInstanceProvider {
    override def getApplicationInstance(params: List[(String, String)]): ApplicationInstance = {
      val input = Stream.of[Record]
      val graph = new StreamGraph(input)
      val config = new ApplicationConfiguration
      config.setListSource(input, Record("1", 1))

      val instanceId = params.find(_._1 == "instanceId").get._2
      val appId = params.find(_._1 == "appId").get._2

      new ApplicationInstance(
        instanceId,
        new Application(appId, graph, SemanticVersion.ZERO),
        config)
    }
  }

  class Compiler extends ApplicationInstanceCompiler {
    override def compile(applicationInstance: ApplicationInstance,
                         params: List[(String, String)],
                         output: OutputStream): Unit = {
      val writer = new OutputStreamWriter(output)
      val testParam = params.find(_._1 == "test").get._2
      writer.write(testParam)
      writer.write(applicationInstance.toJsonString)
      writer.close()
    }
  }

}

@Test
class TestCompileApplicationInstance {
  @Test
  def test_CompileApplicationInstance_Main_SendsProviderAndCompilerParameters(): Unit = {

    val tempFile = Files.createTempFile("TestCompileApplicationInstance", ".scala")
    Files.deleteIfExists(tempFile)

    val appId = Id.newId()
    val instanceId = Id.newId()
    val testValue = Id.newId()

    try {
      val args = Array(
        "--provider",
        "com.amazon.milan.tools.TestCompileApplicationInstance.Provider",
        "--compiler",
        "com.amazon.milan.tools.TestCompileApplicationInstance.Compiler",
        "--package",
        "generated",
        "--output",
        tempFile.toString,
        s"-PinstanceId=$instanceId",
        s"-PappId=$appId",
        s"-Ctest=$testValue"
      )
      CompileApplicationInstance.main(args)

      val fileContents = StandardCharsets.UTF_8.decode(ByteBuffer.wrap(Files.readAllBytes(tempFile))).toString
      assertTrue(fileContents.contains(appId))
      assertTrue(fileContents.contains(instanceId))
      assertTrue(fileContents.contains(testValue))
    }
    finally {
      Files.deleteIfExists(tempFile)
    }
  }
} 
Example 158
Source File: TestGenericTypedJsonDeserializer.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.serialization

import com.amazon.milan.typeutil.{TypeDescriptor, _}
import com.fasterxml.jackson.annotation.JsonCreator
import com.fasterxml.jackson.databind.annotation.{JsonDeserialize, JsonSerialize}
import org.junit.Assert._
import org.junit.Test


object TestGenericTypedJsonDeserializer {

  @JsonSerialize(using = classOf[Serializer])
  @JsonDeserialize(using = classOf[Deserializer])
  trait BaseClass[T] extends GenericTypeInfoProvider with SetGenericTypeInfo

  // Classes cannot have implicit parameters and still be deserializable by jackson, because any constructors will have a
  // second argument list - the implicit parameters - and jackson doesn't know how to handle that.
  @JsonSerialize
  @JsonDeserialize
  class DerivedClass[T](typeDesc: TypeDescriptor[T]) extends BaseClass[T] {
    @JsonCreator
    def this() {
      this(null)
    }

    private var genericArguments: List[TypeDescriptor[_]] = if (typeDesc == null) List() else List(typeDesc)

    override def getGenericArguments: List[TypeDescriptor[_]] = this.genericArguments

    override def setGenericArguments(genericArgs: List[TypeDescriptor[_]]): Unit = this.genericArguments = genericArgs

    override def equals(obj: Any): Boolean = this.genericArguments.equals(obj.asInstanceOf[DerivedClass[T]].genericArguments)
  }

  class Container(var x: BaseClass[_]) {
    override def equals(obj: Any): Boolean = this.x.equals(obj.asInstanceOf[Container].x)
  }

  class Serializer extends GenericTypedJsonSerializer[BaseClass[_]]

  class Deserializer extends GenericTypedJsonDeserializer[BaseClass[_]](name => s"com.amazon.milan.serialization.TestGenericTypedJsonDeserializer$$$name")

  case class GenericType[A, B, C](a: A, b: B, c: C)

}

import com.amazon.milan.serialization.TestGenericTypedJsonDeserializer._


@Test
class TestGenericTypedJsonDeserializer {
  @Test
  def test_GenericTypedJsonDeserializer_WithGenericTypeAsTypeParameter(): Unit = {
    val original = new Container(new DerivedClass[GenericType[Int, Long, Double]](createTypeDescriptor[GenericType[Int, Long, Double]]))
    val json = MilanObjectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(original)
    val copy = MilanObjectMapper.readValue(json, classOf[Container])
    assertEquals(original, copy)
  }
} 
Example 159
Source File: TestCloudFormationJsonGenerators.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.aws.metrics

import org.junit.Assert._
import org.junit.Test


@Test
class TestCloudFormationJsonGenerators {
  @Test
  def test_dashboard__no_widgets(): Unit = {
    val dashboardJsonGenerator = new DashboardJsonGenerator
    assertEquals("{\n  \\\"widgets\\\": [\n\n    \n\n  ]\n}", dashboardJsonGenerator.toJson)
  }

  @Test
  def test_dashboard__single_widgets(): Unit = {
    val dashboardJsonGenerator = new DashboardJsonGenerator()
      .withWidget(new TestWidgetGenerator)

    assertEquals("{\n  \\\"widgets\\\": [\n\n    testjson\n\n  ]\n}", dashboardJsonGenerator.toJson)
  }

  @Test
  def test_dashboard__multiple_widgets(): Unit = {
    val dashboardJsonGenerator = new DashboardJsonGenerator()
      .withWidgets(List(new TestWidgetGenerator, new TestWidgetGenerator))

    assertEquals("{\n  \\\"widgets\\\": [\n\n    testjson,\ntestjson\n\n  ]\n}", dashboardJsonGenerator.toJson)
  }

  @Test
  def test_line_widget(): Unit = {
    val generatedJSON = new LineWidgetJsonGenerator(
      namespace = "ns",
      metricName = "mn",
      metricType = "mt",
      stacked = true,
      region = "reg",
      periodSec = 30,
      title = "tit",
      axisLabel = "lab",
      host = "hos",
      liveData = false
    ).toJson

    assertEquals(
      "{\n  \\\"type\\\": \\\"metric\\\",\n  \\\"x\\\": 0,\n  \\\"y\\\": 0,\n  \\\"width\\\": 6,\n  \\\"height\\\": 6,\n  \\\"properties\\\": {\n    \\\"metrics\\\": [\n      [ \\\"ns\\\", \\\"mn\\\", \\\"host\\\", \\\"hos\\\", \\\"metric_type\\\", \\\"mt\\\", { \\\"label\\\": \\\"p50\\\", \\\"stat\\\": \\\"p50\\\" } ],\n      [ \\\"...\\\", { \\\"stat\\\": \\\"p90\\\", \\\"label\\\": \\\"p90\\\" } ],\n      [ \\\"...\\\", { \\\"label\\\": \\\"p99\\\" } ]\n    ],\n    \\\"view\\\": \\\"timeSeries\\\",\n    \\\"stacked\\\": true,\n    \\\"region\\\": \\\"reg\\\",\n    \\\"liveData\\\": false,\n    \\\"stat\\\": \\\"p99\\\",\n    \\\"period\\\": 30,\n    \\\"title\\\": \\\"tit\\\",\n    \\\"yAxis\\\": {\n      \\\"left\\\": {\n        \\\"showUnits\\\": false,\n        \\\"label\\\": \\\"lab\\\"\n      },\n      \\\"right\\\": {\n        \\\"label\\\": \\\"\\\"\n      }\n    }\n  }\n}",
      generatedJSON
    )
  }

  @Test
  def test_text_widget(): Unit = {
    val generatedJSON = new TextWidgetJsonGenerator("hello").toJson

    assertEquals(
      "{\n  \\\"type\\\": \\\"text\\\",\n  \\\"width\\\": 6,\n  \\\"height\\\": 6,\n  \\\"properties\\\": {\n    \\\"markdown\\\": \\\"hello\\\"\n  }\n}",
      generatedJSON)
  }

  class TestWidgetGenerator extends WidgetJsonGenerator {
    override def toJson: String = "testjson"
  }

} 
Example 160
Source File: TestDashboardCompiler.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.aws.metrics

import org.junit.Assert._
import org.junit.Test


@Test
class TestDashboardCompiler {
  @Test
  def testCompile(): Unit = {
    val template = DashboardCompiler.compile(
      "applicationInstanceId",
      List(
        CompiledMetric("meterName", "meterOp", "Meter"),
        CompiledMetric("counterName", "counterOp", "Counter")),
      List("ip-worker-1", "ip-worker-2"), 3, "eu-west-1", "Milan", "ip-master")

    val expectedTemplate = "{\n  \"AWSTemplateFormatVersion\": \"2010-09-09\",\n  \"Resources\": {\n    \"Dashboard\": {\n      \"Type\": \"AWS::CloudWatch::Dashboard\",\n      \"Properties\": {\n        \"DashboardName\": \"milan-dashboard-applicationInstanceId\",\n        \"DashboardBody\": \"{\n  \\\"widgets\\\": [\n\n    {\n  \\\"type\\\": \\\"text\\\",\n  \\\"width\\\": 6,\n  \\\"height\\\": 6,\n  \\\"properties\\\": {\n    \\\"markdown\\\": \\\"This dashboard was created for Milan using CloudFormation and should not be changed in the console.\\\"\n  }\n},\n{\n  \\\"type\\\": \\\"metric\\\",\n  \\\"x\\\": 0,\n  \\\"y\\\": 0,\n  \\\"width\\\": 6,\n  \\\"height\\\": 6,\n  \\\"properties\\\": {\n    \\\"metrics\\\": [\n      [ \\\"Milan\\\", \\\"ip-worker-1_applicationInstanceId_meterOp_0_meterName_rate\\\", \\\"host\\\", \\\"ip-master\\\", \\\"metric_type\\\", \\\"gauge\\\", { \\\"label\\\": \\\"p50\\\", \\\"stat\\\": \\\"p50\\\" } ],\n      [ \\\"...\\\", { \\\"stat\\\": \\\"p90\\\", \\\"label\\\": \\\"p90\\\" } ],\n      [ \\\"...\\\", { \\\"label\\\": \\\"p99\\\" } ]\n    ],\n    \\\"view\\\": \\\"timeSeries\\\",\n    \\\"stacked\\\": false,\n    \\\"region\\\": \\\"eu-west-1\\\",\n    \\\"liveData\\\": false,\n    \\\"stat\\\": \\\"p99\\\",\n    \\\"period\\\": 300,\n    \\\"title\\\": \\\"ip-worker-1_meterOp_0_meterName_rate\\\",\n    \\\"yAxis\\\": {\n      \\\"left\\\": {\n        \\\"showUnits\\\": false,\n        \\\"label\\\": \\\"Events/second\\\"\n      },\n      \\\"right\\\": {\n        \\\"label\\\": \\\"\\\"\n      }\n    }\n  }\n},\n{\n  \\\"type\\\": \\\"metric\\\",\n  \\\"x\\\": 0,\n  \\\"y\\\": 0,\n  \\\"width\\\": 6,\n  \\\"height\\\": 6,\n  \\\"properties\\\": {\n    \\\"metrics\\\": [\n      [ \\\"Milan\\\", \\\"ip-worker-2_applicationInstanceId_meterOp_0_meterName_rate\\\", \\\"host\\\", \\\"ip-master\\\", \\\"metric_type\\\", \\\"gauge\\\", { \\\"label\\\": \\\"p50\\\", \\\"stat\\\": \\\"p50\\\" } ],\n      [ \\\"...\\\", { \\\"stat\\\": \\\"p90\\\", \\\"label\\\": \\\"p90\\\" } ],\n      [ \\\"...\\\", { \\\"label\\\": \\\"p99\\\" } ]\n    ],\n    \\\"view\\\": \\\"timeSeries\\\",\n    \\\"stacked\\\": false,\n    \\\"region\\\": \\\"eu-west-1\\\",\n    \\\"liveData\\\": false,\n    \\\"stat\\\": \\\"p99\\\",\n    \\\"period\\\": 300,\n    \\\"title\\\": \\\"ip-worker-2_meterOp_0_meterName_rate\\\",\n    \\\"yAxis\\\": {\n      \\\"left\\\": {\n        \\\"showUnits\\\": false,\n        \\\"label\\\": \\\"Events/second\\\"\n      },\n      \\\"right\\\": {\n        \\\"label\\\": \\\"\\\"\n      }\n    }\n  }\n},\n{\n  \\\"type\\\": \\\"metric\\\",\n  \\\"x\\\": 0,\n  \\\"y\\\": 0,\n  \\\"width\\\": 6,\n  \\\"height\\\": 6,\n  \\\"properties\\\": {\n    \\\"metrics\\\": [\n      [ \\\"Milan\\\", \\\"ip-worker-1_applicationInstanceId_counterOp_0_counterName\\\", \\\"host\\\", \\\"ip-master\\\", \\\"metric_type\\\", \\\"gauge\\\", { \\\"label\\\": \\\"p50\\\", \\\"stat\\\": \\\"p50\\\" } ],\n      [ \\\"...\\\", { \\\"stat\\\": \\\"p90\\\", \\\"label\\\": \\\"p90\\\" } ],\n      [ \\\"...\\\", { \\\"label\\\": \\\"p99\\\" } ]\n    ],\n    \\\"view\\\": \\\"timeSeries\\\",\n    \\\"stacked\\\": false,\n    \\\"region\\\": \\\"eu-west-1\\\",\n    \\\"liveData\\\": false,\n    \\\"stat\\\": \\\"p99\\\",\n    \\\"period\\\": 300,\n    \\\"title\\\": \\\"ip-worker-1_counterOp_0_counterName\\\",\n    \\\"yAxis\\\": {\n      \\\"left\\\": {\n        \\\"showUnits\\\": false,\n        \\\"label\\\": \\\"Count\\\"\n      },\n      \\\"right\\\": {\n        \\\"label\\\": 
\\\"\\\"\n      }\n    }\n  }\n},\n{\n  \\\"type\\\": \\\"metric\\\",\n  \\\"x\\\": 0,\n  \\\"y\\\": 0,\n  \\\"width\\\": 6,\n  \\\"height\\\": 6,\n  \\\"properties\\\": {\n    \\\"metrics\\\": [\n      [ \\\"Milan\\\", \\\"ip-worker-2_applicationInstanceId_counterOp_0_counterName\\\", \\\"host\\\", \\\"ip-master\\\", \\\"metric_type\\\", \\\"gauge\\\", { \\\"label\\\": \\\"p50\\\", \\\"stat\\\": \\\"p50\\\" } ],\n      [ \\\"...\\\", { \\\"stat\\\": \\\"p90\\\", \\\"label\\\": \\\"p90\\\" } ],\n      [ \\\"...\\\", { \\\"label\\\": \\\"p99\\\" } ]\n    ],\n    \\\"view\\\": \\\"timeSeries\\\",\n    \\\"stacked\\\": false,\n    \\\"region\\\": \\\"eu-west-1\\\",\n    \\\"liveData\\\": false,\n    \\\"stat\\\": \\\"p99\\\",\n    \\\"period\\\": 300,\n    \\\"title\\\": \\\"ip-worker-2_counterOp_0_counterName\\\",\n    \\\"yAxis\\\": {\n      \\\"left\\\": {\n        \\\"showUnits\\\": false,\n        \\\"label\\\": \\\"Count\\\"\n      },\n      \\\"right\\\": {\n        \\\"label\\\": \\\"\\\"\n      }\n    }\n  }\n}\n\n  ]\n}\"\n      }\n    }\n  }\n}"

    assertEquals(expectedTemplate, template)
  }
} 
Example 161
Source File: TestListDataSource.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.application.sources

import com.amazon.milan.application.DataSource
import com.amazon.milan.serialization.MilanObjectMapper
import com.amazon.milan.typeutil.createTypeDescriptor
import org.junit.Assert._
import org.junit.Test


object TestListDataSource {

  case class Record(i: Int)

}

import com.amazon.milan.application.sources.TestListDataSource._


@Test
class TestListDataSource {
  @Test
  def test_ListDataSource_AfterSerializationAndDeserializaton_ContainsEquivalentElements(): Unit = {
    val original = new ListDataSource(List(Record(1), Record(2)))
    val originalAsSource = original.asInstanceOf[DataSource[Record]]
    val copy = MilanObjectMapper.copy(originalAsSource)
    assertEquals(original.values, copy.asInstanceOf[ListDataSource[Record]].values)
  }
} 
Example 162
Source File: TestJsonDataInputFormat.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.dataformats

import com.amazon.milan.serialization.{DataFormatConfiguration, DataFormatFlags, MilanObjectMapper}
import com.amazon.milan.typeutil.createTypeDescriptor
import com.fasterxml.jackson.databind.exc.UnrecognizedPropertyException
import org.junit.Assert._
import org.junit.Test


object TestJsonDataInputFormat {

  case class Record()

  case class GenericRecord[T](i: T, l: List[T])

}

import com.amazon.milan.dataformats.TestJsonDataInputFormat._


@Test
class TestJsonDataInputFormat {
  @Test
  def test_JsonDataInputFormat_SerializeAndDeserializeAsDataFormat_ReturnsEquivalentObject(): Unit = {
    val target = new JsonDataInputFormat[Record](DataFormatConfiguration.default)
    val json = MilanObjectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(target)
    val deserialized = MilanObjectMapper.readValue[DataInputFormat[Any]](json, classOf[DataInputFormat[Any]])
    assertEquals(target, deserialized)
  }

  @Test
  def test_JsonDataInputFormat_SerializeAndDeserializeAsDataFormat_ReturnsSerializerWithEquivalentBehavior(): Unit = {
    val original = new JsonDataInputFormat[GenericRecord[Integer]](DataFormatConfiguration.default)
    val json = MilanObjectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(original)
    val copy = MilanObjectMapper.readValue[DataInputFormat[GenericRecord[Integer]]](json, classOf[DataInputFormat[GenericRecord[Integer]]])

    val record = GenericRecord[Integer](5, List(1, 2, 3, 4))
    val recordBytes = MilanObjectMapper.writeValueAsBytes(record)

    val deserByOriginal = original.readValue(recordBytes, 0, recordBytes.length).get
    val deserByCopy = copy.readValue(recordBytes, 0, recordBytes.length).get
    assertEquals(deserByOriginal, deserByCopy)
  }

  @Test(expected = classOf[UnrecognizedPropertyException])
  def test_JsonDataInputFormat_ReadValue_WithFailOnUnknownPropertiesTrueAndExtraPropertyInJson_ThrowsUnrecognizedPropertyException(): Unit = {
    val format = new JsonDataInputFormat[Record](DataFormatConfiguration.withFlags(DataFormatFlags.FailOnUnknownProperties))
    val jsonBytes = "{ \"unknownProperty\": 0, \"value\": 1 }".getBytes("utf-8")
    format.readValue(jsonBytes, 0, jsonBytes.length).get
  }
} 
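A hedged sketch of the complementary case to the expected-exception test above, written as an extra method for TestJsonDataInputFormat: with the default configuration (FailOnUnknownProperties not enabled), an unknown property is assumed to be tolerated rather than rejected, mirroring what the CSV example below shows for its format:

  @Test
  def test_JsonDataInputFormat_ReadValue_WithDefaultConfiguration_AndExtraProperty_DoesNotThrow(): Unit = {
    // Assumes default configuration leaves FailOnUnknownProperties off.
    val format = new JsonDataInputFormat[Record](DataFormatConfiguration.default)
    val jsonBytes = "{ \"unknownProperty\": 0 }".getBytes("utf-8")
    // Record has no fields, so the only question is whether the extra property is tolerated.
    format.readValue(jsonBytes, 0, jsonBytes.length).get
  }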
Example 163
Source File: TestCsvDataInputFormat.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.dataformats

import java.nio.charset.StandardCharsets

import com.amazon.milan.serialization.{DataFormatConfiguration, DataFormatFlags, MilanObjectMapper}
import com.amazon.milan.test.IntStringRecord
import com.amazon.milan.typeutil._
import org.junit.Assert._
import org.junit.Test


object TestCsvDataInputFormat {

  class TestClass(var intValue: Int, var stringValue: String, var doubleValue: Double) {
    def this() {
      this(0, "", 0)
    }
  }

}

import com.amazon.milan.dataformats.TestCsvDataInputFormat._


@Test
class TestCsvDataInputFormat {
  @Test
  def test_CsvDataInputFormat_ReadValue_WithUtf8EncodedCsvRow_ReturnsCorrectObject(): Unit = {
    val format = new CsvDataInputFormat[TestClass](Array("intValue", "stringValue", "doubleValue"), DataFormatConfiguration.default)
    val row = "1,\"foo bar\",3.14"
    val rowBytes = row.getBytes("utf-8")
    val output = format.readValue(rowBytes, 0, rowBytes.length).get

    assertEquals(1, output.intValue)
    assertEquals("foo bar", output.stringValue)
    assertEquals(3.14, output.doubleValue, 1e-10)
  }

  @Test
  def test_CsvDataInputFormat_ReadValue_WithUtf8EncodedCsvRowWithOneFieldMissing_ReturnsObjectWithDefaultValueForThatField(): Unit = {
    val format = new CsvDataInputFormat[TestClass](Array("intValue", "stringValue", "doubleValue"), DataFormatConfiguration.default)
    val row = "1,\"foo bar\""
    val rowBytes = row.getBytes("utf-8")
    val output = format.readValue(rowBytes, 0, rowBytes.length).get

    assertEquals(1, output.intValue)
    assertEquals("foo bar", output.stringValue)
    assertEquals(0.0, output.doubleValue, 0)
  }

  @Test(expected = classOf[PropertyNotFoundException])
  def test_CsvDataInputFormat_ReadValue_WithFailOnUnknownPropertiesTrue_AndUnknownPropertyInSchema_ThrowsPropertyNotFoundException(): Unit = {
    val format = new CsvDataInputFormat[TestClass](Array("unknownProperty"), DataFormatConfiguration.withFlags(DataFormatFlags.FailOnUnknownProperties))
    val row = "1"
    val rowBytes = row.getBytes("utf-8")
    format.readValue(rowBytes, 0, rowBytes.length)
  }

  @Test
  def test_CsvDataInputFormat_ReadValue_WithFailOnUnknownPropertiesFalse_AndUnknownPropertyInSchema_DoesNotThrow(): Unit = {
    val format = new CsvDataInputFormat[TestClass](Array("unknownProperty"), DataFormatConfiguration.default)
    val row = "1"
    val rowBytes = row.getBytes("utf-8")
    format.readValue(rowBytes, 0, rowBytes.length)
  }

  @Test
  def test_CsvDataInputFormat_WithFailOnUnknownPropertiesTrue_JsonSerializeAndDeserializeAsDataFormat_YieldsEquivalentObject(): Unit = {
    val original = new CsvDataInputFormat[TestClass](
      Array("intValue", "stringValue", "doubleValue"),
      DataFormatConfiguration.withFlags(DataFormatFlags.FailOnUnknownProperties))

    val copy = MilanObjectMapper.copy(original.asInstanceOf[DataInputFormat[TestClass]])

    assertEquals(original, copy)
  }

  @Test
  def test_CsvDataInputFormat_WithNonStandardSeparatorAndNullIdentifier_CorrectlyParsesARecord(): Unit = {
    val format = new CsvDataInputFormat[IntStringRecord](Array("i", "s"), true, 0x01, "\\N", DataFormatConfiguration.default)
    val inputRecordString = "3\u0001\\N"
    val inputRecordBytes = inputRecordString.getBytes(StandardCharsets.UTF_8)
    val record = format.readValue(inputRecordBytes, 0, inputRecordBytes.length).get
    assertEquals(IntStringRecord(3, null), record)
  }
} 
Example 164
Source File: TestStream.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.lang

import com.amazon.milan.program.ExternalStream
import com.amazon.milan.test.{IntStringRecord, Tuple3Record}
import org.junit.Assert._
import org.junit.Test


@Test
class TestStream {
  @Test
  def test_Stream_Of_ReturnsStreamWithExternalStreamNodeAndCorrectTypeName(): Unit = {
    val input = Stream.of[IntStringRecord]

    val ExternalStream(_, _, streamType) = input.expr
    assertEquals(TypeUtil.getTypeName(classOf[IntStringRecord]), streamType.recordType.fullName)
  }

  @Test
  def test_Stream_Of_GenericType_HasRecordTypeWithGenericArguments(): Unit = {
    val input = Stream.of[Tuple3Record[Int, Long, Double]]

    val ExternalStream(_, _, streamType) = input.expr
    assertEquals(3, streamType.recordType.genericArguments.length)
  }
} 
Example 165
Source File: TestFunctionOfStream.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.lang

import com.amazon.milan.program.{FunctionDef, SelectTerm, StreamMap, ValueDef}
import com.amazon.milan.test.{IntKeyValueRecord, StringRecord}
import org.junit.Test


object TestFunctionOfStream {
  def combineKeyValue(key: Int, value: Int): String = s"$key.$value"
}

import com.amazon.milan.lang.TestFunctionOfStream._


@Test
class TestFunctionOfStream {
  @Test
  def test_FunctionOfStream_WithGroupByThenMapUsingUserFunction_PutsUsersFunctionExpressionInTree(): Unit = {
    def mapStream(stream: Stream[IntKeyValueRecord]): Stream[StringRecord] = {
      stream.map(r => StringRecord(combineKeyValue(r.key, r.value)))
    }

    val input = Stream.of[IntKeyValueRecord]
    val grouped = input.groupBy(r => r.key)
    val mapped = grouped.map((key, group) => mapStream(group))

    val StreamMap(_, FunctionDef(List(ValueDef("key", _), ValueDef("group", _)), StreamMap(SelectTerm("group"), _))) = mapped.expr
  }
} 
Example 166
Source File: TestWindow.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.lang

import java.time.Duration

import com.amazon.milan.lang.aggregation._
import com.amazon.milan.program
import com.amazon.milan.program.{GroupBy, _}
import com.amazon.milan.test.{DateIntRecord, DateKeyValueRecord}
import com.amazon.milan.typeutil.{FieldDescriptor, types}
import org.junit.Assert._
import org.junit.Test

import scala.language.existentials


@Test
class TestWindow {
  @Test
  def test_TumblingWindow_ReturnsStreamWithCorrectInputNodeAndWindowProperties(): Unit = {
    val stream = Stream.of[DateIntRecord]
    val windowed = stream.tumblingWindow(r => r.dateTime, Duration.ofHours(1), Duration.ofMinutes(30))

    val TumblingWindow(_, dateExtractorFunc, period, offset) = windowed.expr

    // If this extraction doesn't throw an exception then the date extractor function has the expected structure.
    val FunctionDef(List(ValueDef("r", _)), SelectField(SelectTerm("r"), "dateTime")) = dateExtractorFunc

    assertEquals(Duration.ofHours(1), period.asJava)
    assertEquals(Duration.ofMinutes(30), offset.asJava)
  }

  @Test
  def test_TumblingWindow_ThenSelectToTuple_ReturnsStreamWithCorrectFieldComputationExpression(): Unit = {
    val stream = Stream.of[DateIntRecord]
    val grouped = stream.tumblingWindow(r => r.dateTime, Duration.ofHours(1), Duration.ofMinutes(30))
    val selected = grouped.select((key, r) => fields(field("max", max(r.i))))

    val Aggregate(source, FunctionDef(_, NamedFields(fieldList))) = selected.expr

    assertEquals(1, selected.recordType.fields.length)
    assertEquals(FieldDescriptor("max", types.Int), selected.recordType.fields.head)

    assertEquals(1, fieldList.length)
    assertEquals("max", fieldList.head.fieldName)

    // If this extraction statement doesn't crash then we're good.
    val Max(SelectField(SelectTerm("r"), "i")) = fieldList.head.expr
  }

  @Test
  def test_SlidingWindow_ReturnsStreamWithCorrectInputNodeAndWindowProperties(): Unit = {
    val stream = Stream.of[DateIntRecord]
    val windowed = stream.slidingWindow(r => r.dateTime, Duration.ofHours(1), Duration.ofMinutes(10), Duration.ofMinutes(30))

    val SlidingWindow(_, dateExtractorFunc, size, slide, offset) = windowed.expr

    val FunctionDef(List(ValueDef("r", _)), SelectField(SelectTerm("r"), "dateTime")) = dateExtractorFunc

    assertEquals(Duration.ofHours(1), size.asJava)
    assertEquals(Duration.ofMinutes(10), slide.asJava)
    assertEquals(Duration.ofMinutes(30), offset.asJava)
  }

  @Test
  def test_GroupBy_ThenTumblingWindow_ThenSelect_ReturnsStreamWithCorrectInputNodeAndWindowProperties(): Unit = {
    val input = Stream.of[DateKeyValueRecord].withId("input")
    val output = input.groupBy(r => r.key)
      .tumblingWindow(r => r.dateTime, Duration.ofMinutes(5), Duration.ZERO)
      .select((windowStart, r) => any(r))

    val Aggregate(windowExpr, FunctionDef(List(ValueDef("windowStart", _), ValueDef("r", _)), First(SelectTerm("r")))) = output.expr
    val TumblingWindow(groupExpr, FunctionDef(List(ValueDef("r", _)), SelectField(SelectTerm("r"), "dateTime")), program.Duration(300000), program.Duration(0)) = windowExpr
    val GroupBy(ExternalStream("input", "input", _), FunctionDef(List(ValueDef("r", _)), SelectField(SelectTerm("r"), "key"))) = groupExpr
  }
} 
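A note on the assertion idiom used in the examples above and below: many of these tests destructure an expression tree with a val pattern (for example the TumblingWindow and FunctionDef extractions) and rely on the fact that a non-matching pattern throws a MatchError, which JUnit reports as a test failure. The following is a minimal, self-contained sketch of that idiom using only standard Scala; the names are illustrative and not part of Milan.

object PatternAssertionSketch {
  def main(args: Array[String]): Unit = {
    val tree: Any = ("dateTime", 42)

    // A val pattern that matches simply binds its parts...
    val (fieldName: String, value: Int) = tree

    // ...while a pattern that does not match throws a MatchError, failing the test.
    // Uncommenting the next line would throw:
    // val (a: Int, b: Int) = tree

    println(s"$fieldName -> $value")
  }
}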
Example 167
Source File: TestNamedFields.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.lang

import com.amazon.milan.program.{ConstantValue, NamedField, NamedFields, Tree}
import org.junit.Test


@Test
class TestNamedFields {
  @Test
  def test_NamedFields_WithTwoFieldsWithConstantValues_ProducesExpectedExpressionTree(): Unit = {
    val tree = Tree.fromExpression(fields(
      field("x", 1),
      field("y", "foo")
    ))

    val NamedFields(List(
    NamedField("x", ConstantValue(1, _)),
    NamedField("y", ConstantValue("foo", _))
    )) = tree
  }
} 
Example 168
Source File: TestGroupBy.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.lang

import com.amazon.milan.lang.aggregation._
import com.amazon.milan.program.{ValueDef, _}
import com.amazon.milan.test.{IntKeyValueRecord, IntRecord}
import com.amazon.milan.typeutil.{FieldDescriptor, types}
import org.junit.Assert._
import org.junit.Test

object TestGroupBy {
  def transformGroup(group: Stream[IntKeyValueRecord]): Stream[IntRecord] = {
    group.map(r => IntRecord(r.value))
  }
}


@Test
class TestGroupBy {
  @Test
  def test_Stream_GroupBy_ReturnsStreamWithCorrectInputNodeAndKeyFunction(): Unit = {
    val stream = Stream.of[IntKeyValueRecord]
    val grouped = stream.groupBy(r => r.key)

    // If this extraction statement doesn't crash then we're good.
    val GroupBy(_, FunctionDef(List(ValueDef("r", _)), SelectField(SelectTerm("r"), "key"))) = grouped.expr

    assertEquals(stream.expr, grouped.expr.getChildren.head)
  }

  @Test
  def test_Stream_GroupBy_ThenSelectToTuple_ReturnsStreamWithCorrectFieldComputationExpression(): Unit = {
    val stream = Stream.of[IntKeyValueRecord]
    val grouped = stream.groupBy(r => r.key)
    val selected = grouped.select((key, r) => fields(field("i", key)))

    assertEquals(1, selected.recordType.fields.length)
    assertEquals(FieldDescriptor("i", types.Int), selected.recordType.fields.head)

    val aggExpr = selected.expr.asInstanceOf[Aggregate]
    assertEquals(1, aggExpr.recordType.fields.length)
    assertEquals("i", aggExpr.recordType.fields.head.name)

    // If this extraction statement doesn't crash then we're good.
    val FunctionDef(List(ValueDef("key", _), ValueDef("r", _)), NamedFields(List(NamedField("i", SelectTerm("key"))))) = aggExpr.expr
  }

  @Test
  def test_Stream_GroupBy_ThenSelectToObject_ReturnsStreamWithCorrectMapFunction(): Unit = {
    val stream = Stream.of[IntKeyValueRecord]
    val grouped = stream.groupBy(r => r.key)
    val selected = grouped.select((key, r) => argmax(r.value, r))

    val aggExpr = selected.expr.asInstanceOf[Aggregate]

    // If this extraction statement doesn't crash then we're good.
    val FunctionDef(List(ValueDef("key", _), ValueDef("r", _)), ArgMax(Tuple(List(SelectField(SelectTerm("r"), "value"), SelectTerm("r"))))) = aggExpr.expr
  }
} 
Example 169
Source File: TestConvertExpression.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.program

import com.amazon.milan.test.{IntRecord, KeyValueRecord}
import com.amazon.milan.typeutil.{TypeDescriptor, types}
import org.junit.Test

object TestConvertExpression {
  val ConstantValue: String = "ConstantValue"
}


@Test
class TestConvertExpression {
  @Test
  def test_ConvertExpression_NewObject_ReturnsExpectedExpressionTree(): Unit = {
    val tree = Tree.fromExpression(new IntRecord(0))
    val CreateInstance(TypeDescriptor("com.amazon.milan.test.IntRecord"), List(ConstantValue(0, types.Int))) = tree
  }

  @Test
  def test_ConvertExpression_TupleFunction_ReturnsExpectedExpressionTree(): Unit = {
    val tree = Tree.fromExpression((a: Int, b: (Int, Int)) => b match {
      case (x, y) => a == x
    })

    val FunctionDef(List(ValueDef("a", _), ValueDef("b", _)), Unpack(SelectTerm("b"), List("x", "y"), Equals(SelectTerm("a"), SelectTerm("x")))) = tree
  }

  @Test
  def test_ConvertExpression_TwoArgumentFunction_ThatReferencesOneArgument_ReturnsSelectTerm(): Unit = {
    val tree = Tree.fromExpression((a: Int, b: String) => a)
    val FunctionDef(List(ValueDef("a", _), ValueDef("b", _)), SelectTerm("a")) = tree
  }

  @Test
  def test_ConvertExpression_OneArgumentFunction_ThatReferencesAFieldOfThatArgument_ReturnsSelectField(): Unit = {
    val tree = Tree.fromExpression((a: IntRecord) => a.i)
    val FunctionDef(List(ValueDef("a", _)), SelectField(SelectTerm("a"), "i")) = tree
  }

  @Test
  def test_ConvertExpression_TwoArgumentFunction_WithOneRecordAndOneTupleArgument_ThatUnpacksTupleArgumentAndUsesAllFields_ReturnsExpectedTree(): Unit = {
    val tree = Tree.fromExpression((c: KeyValueRecord, ab: (KeyValueRecord, KeyValueRecord)) => ab match {
      case (a, b) => a != null && b != null && c != null
    })
    val FunctionDef(List(ValueDef("c", _), ValueDef("ab", _)), Unpack(SelectTerm("ab"), List("a", "b"), And(And(Not(IsNull(SelectTerm("a"))), Not(IsNull(SelectTerm("b")))), Not(IsNull(SelectTerm("c")))))) = tree
  }

  @Test
  def test_ConvertExpression_WithSimpleExpressionThatUsesLocalIntVariable_ConvertsVariableValueIntoConstant(): Unit = {
    val threshold = 5
    val tree = Tree.fromExpression((i: Int) => i > threshold)
    val FunctionDef(List(ValueDef("i", _)), GreaterThan(SelectTerm("i"), ConstantValue(5, types.Int))) = tree
  }

  @Test
  def test_ConvertExpression_WithSimpleExpressionThatUsesLocalStringVariable_ConvertsVariableValueIntoConstant(): Unit = {
    val value = "value"
    val tree = Tree.fromExpression((s: String) => s == value)
    val FunctionDef(List(ValueDef("s", _)), Equals(SelectTerm("s"), ConstantValue(s, types.String))) = tree
  }

  @Test
  def test_ConvertExpression_WithMatchExpressionThatUsesLocalIntVariable_ConvertsVariableValueIntoConstant(): Unit = {
    val threshold = 5
    val tree = Tree.fromExpression((t: (Int, Long)) => t match {
      case (i, _) => i > threshold
    })
    val FunctionDef(List(ValueDef("t", _)), Unpack(SelectTerm("t"), List("i", "_"), GreaterThan(SelectTerm("i"), ConstantValue(5, types.Int)))) = tree
  }

  @Test
  def test_ConvertExpression_WithSimpleExpressionThatUsesIntArgument_ConvertsVariableValueIntoConstant(): Unit = {
    def getTree(threshold: Int): Tree =
      Tree.fromExpression((i: Int) => i > threshold)

    val tree = getTree(5)
    val FunctionDef(List(ValueDef("i", _)), GreaterThan(SelectTerm("i"), ConstantValue(5, types.Int))) = tree
  }

  @Test
  def test_ConvertExpression_WithConstantVal_ConvertsValueIntoConstant(): Unit = {
    val tree = Tree.fromExpression(TestConvertExpression.ConstantValue)
    val ConstantValue("ConstantValue", types.String) = tree
  }

  @Test
  def test_ConvertExpression_IntToString_ProducesConvertTypeExpression(): Unit = {
    val tree = Tree.fromFunction((i: Int) => i.toString)
    val FunctionDef(List(ValueDef("i", _)), ConvertType(SelectTerm("i"), types.String)) = tree
  }

  @Test
  def test_ConvertExpression_StringToInt_ProducesConvertTypeExpression(): Unit = {
    val tree = Tree.fromFunction((s: String) => s.toInt)
    val FunctionDef(List(ValueDef("s", _)), ConvertType(SelectTerm("s"), types.Int)) = tree
  }
} 
Example 170
Source File: TestTree.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.program

import com.amazon.milan.typeutil.types
import org.junit.Assert._
import org.junit.Test


@Test
class TestTree {
  @Test
  def test_Tree_ToString_ForApplyFunctionTree_ReturnsExpectedString(): Unit = {
    val tree = new ApplyFunction(FunctionReference("MyType", "MyFunction"), List(new SelectField(SelectTerm("foo"), "bar")), types.Boolean)
    val str = tree.toString
    assertEquals("ApplyFunction(FunctionReference(\"MyType\", \"MyFunction\"), List(SelectField(SelectTerm(\"foo\"), \"bar\")), TypeDescriptor(\"Boolean\"))", str)
  }

  @Test
  def test_Tree_ToString_ForConstantValueOfInteger_ReturnsExpectedString(): Unit = {
    val tree = new ConstantValue(1, types.Int)
    val str = tree.toString
    assertEquals("ConstantValue(1, TypeDescriptor(\"Int\"))", str)
  }
} 
Example 171
Source File: TestTreeParser.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.program

import com.amazon.milan.typeutil.TypeDescriptor
import org.junit.Test


@Test
class TestTreeParser {
  @Test
  def test_TreeParser_Parse_WithFunctionTree_ReturnsMatchingTreeStructure(): Unit = {
    val tree = TreeParser.parse[FunctionDef]("FunctionDef(List(ValueDef(\"x\"), ValueDef(\"y\")), Equals(SelectTerm(\"x\"), SelectTerm(\"y\")))")
    val FunctionDef(List(ValueDef("x", _), ValueDef("y", _)), Equals(SelectTerm("x"), SelectTerm("y"))) = tree
  }

  @Test
  def test_TreeParser_Parse_WithConstantIntValueTree_ReturnsMatchingTreeStructure(): Unit = {
    val tree = TreeParser.parse[ConstantValue]("ConstantValue(1, TypeDescriptor(\"Int\"))")
    val ConstantValue(1, TypeDescriptor("Int")) = tree
  }
} 
Example 172
Source File: TestTypeChecker.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.program

import com.amazon.milan.lang.{field, fields}
import com.amazon.milan.test.IntRecord
import com.amazon.milan.typeutil.{TypeDescriptor, types}
import org.junit.Assert._
import org.junit.Test


@Test
class TestTypeChecker {
  @Test
  def test_TypeChecker_TypeCheck_MapRecordOfFlatMapOfGroupBy_DoesntThrowException(): Unit = {
    val expr = new StreamMap(
      new FlatMap(
        new GroupBy(
          new ExternalStream("input", "input", TypeDescriptor.streamOf[IntRecord]),
          FunctionDef.create(List("r"), SelectField(SelectTerm("r"), "i"))),
        FunctionDef.create(
          List("k", "g"),
          StreamArgMax(SelectTerm("g"), FunctionDef.create(List("r"), SelectField(SelectTerm("r"), "i"))))
      ),
      FunctionDef.create(List("r"), CreateInstance(TypeDescriptor.of[IntRecord], List(ConstantValue(1, types.Int))))
    )

    TypeChecker.typeCheck(expr, Map("input" -> TypeDescriptor.streamOf[IntRecord]))
  }

  @Test
  def test_TypeChecker_TypeCheck_WithFunctionUsingNamedFields_ProducesNamedTupleReturnType(): Unit = {
    val tree = Tree.fromFunction((i: Int) => fields(
      field("a", i + 1),
      field("b", i.toString)
    )).withArgumentTypes(List(types.Int))

    TypeChecker.typeCheck(tree)

    val expectedType = TypeDescriptor.createNamedTuple(List(("a", types.Int), ("b", types.String)))
    assertEquals(expectedType, tree.tpe)
  }
} 
Example 173
Source File: TestArgumentsBase.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.cmd

import org.junit.Assert._
import org.junit.Test

import scala.collection.mutable


object TestArgumentsBase {

  class ArgsOneRequiredStringArgument extends ArgumentsBase {
    @NamedArgument(Name = "string")
    var stringArgument: String = ""
  }


  class ArgsOneOptionalStringArgument extends ArgumentsBase {
    @NamedArgument(Name = "string", Required = false, DefaultValue = "default")
    var stringArgument: String = ""
  }


  class ArgsParameters extends ArgumentsBase {
    @NamedArgument(Name = "foo", Required = true)
    var foo: String = ""

    @ParametersArgument(Prefix = "P")
    val paramsP: mutable.ListBuffer[(String, String)] = mutable.ListBuffer.empty

    @ParametersArgument(Prefix = "C")
    val paramsC: mutable.ListBuffer[(String, String)] = mutable.ListBuffer.empty
  }

}

import com.amazon.milan.cmd.TestArgumentsBase._

@Test
class TestArgumentsBase {
  @Test
  def test_ArgumentsBase_Parse_WithLongNameOfStringArgument_ParsesTheArgument(): Unit = {
    val args = new ArgsOneRequiredStringArgument()
    args.parse(Array("--string", "value"))
    assertEquals("value", args.stringArgument)
  }

  @Test(expected = classOf[Exception])
  def test_ArgumentsBase_Parse_WithOneRequiredArgumentNotSupplied_ThrowsException(): Unit = {
    val args = new ArgsOneRequiredStringArgument()
    args.parse(Array())
  }

  @Test
  def test_ArgumentsBase_Parse_WithOneOptionalArgumentNotSupplied_HasDefaultArgumentValue(): Unit = {
    val args = new ArgsOneOptionalStringArgument()
    args.parse(Array())
    assertEquals("default", args.stringArgument)
  }

  @Test
  def test_ArgumentsBase_Parse_WithParametersArgument_SetsParameters(): Unit = {
    val args = new ArgsParameters()
    args.parse(Array("--foo", "foo", "-Pa=b", "-Px=y", "-Cq=t"))
    assertEquals(args.foo, "foo")
    assertEquals(args.paramsP.find(_._1 == "a").get._2, "b")
    assertEquals(args.paramsP.find(_._1 == "x").get._2, "y")
    assertEquals(args.paramsC.find(_._1 == "q").get._2, "t")
  }
} 
Example 174
Source File: TestMilanObjectMapper.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.serialization

import com.fasterxml.jackson.databind.exc.UnrecognizedPropertyException
import org.junit.Assert._
import org.junit.Test


object TestMilanObjectMapper {

  case class Record(i: Int)

  case class Record2(i: Int, s: String)

}

import com.amazon.milan.serialization.TestMilanObjectMapper._


@Test
class TestMilanObjectMapper {
  @Test
  def test_MilanObjectMapper_WithDefaultConfig_WithExtraPropertyInJson_IgnoresExtraProperty(): Unit = {
    val mapper = new MilanObjectMapper(DataFormatConfiguration.default)
    val json = mapper.writeValueAsString(Record2(2, "foo"))
    val o = mapper.readValue[Record](json, classOf[Record])
    assertEquals(2, o.i)
  }

  @Test(expected = classOf[UnrecognizedPropertyException])
  def test_MilanObjectMapper_WithFailOnUnknownProperties_WithExtraPropertyInJson_ThrowsUnrecognizedPropertyException(): Unit = {
    val mapper = new MilanObjectMapper(DataFormatConfiguration.withFlags(DataFormatFlags.FailOnUnknownProperties))
    val json = mapper.writeValueAsString(Record2(2, "foo"))
    val o = mapper.readValue[Record](json, classOf[Record])
  }
} 
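For context on the flag exercised above: a plausible interpretation (an assumption on my part, not taken from the Milan sources) is that DataFormatFlags.FailOnUnknownProperties corresponds to Jackson's FAIL_ON_UNKNOWN_PROPERTIES deserialization feature. The sketch below shows that feature with plain Jackson and jackson-module-scala, independent of MilanObjectMapper.

import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
import com.fasterxml.jackson.module.scala.DefaultScalaModule

object UnknownPropertiesSketch {
  case class Record(i: Int)

  def main(args: Array[String]): Unit = {
    // With the feature disabled, extra JSON fields are silently ignored.
    val lenient = new ObjectMapper()
      .registerModule(DefaultScalaModule)
      .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)

    println(lenient.readValue("""{"i":2,"s":"foo"}""", classOf[Record])) // Record(2)

    // With the feature enabled, the same input throws UnrecognizedPropertyException.
    val strict = new ObjectMapper()
      .registerModule(DefaultScalaModule)
      .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, true)

    try strict.readValue("""{"i":2,"s":"foo"}""", classOf[Record])
    catch { case e: Exception => println(e.getClass.getSimpleName) }
  }
}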
Example 175
Source File: TestTypeDescriptor.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.typeutil

import com.amazon.milan.serialization.MilanObjectMapper
import org.junit.Assert.{assertEquals, assertTrue}
import org.junit.Test


object TestTypeDescriptor {

  case class Record()

  case class GenericRecord[T]()

}

import com.amazon.milan.typeutil.TestTypeDescriptor._


class TestTypeDescriptor {
  @Test
  def test_TypeDescriptor_Of_WithNonGenericRecordType_CreatesExpectedRecordTypeDescriptor(): Unit = {
    val target = TypeDescriptor.of[Record]
    assertTrue(target.genericArguments.isEmpty)
    assertEquals("com.amazon.milan.typeutil.TestTypeDescriptor.Record", target.typeName)
  }

  @Test
  def test_TypeDescriptor_Of_WithGenericRecordType_CreatesExpectedRecordTypeDescriptor(): Unit = {
    val target = TypeDescriptor.of[GenericRecord[Int]]
    assertEquals("com.amazon.milan.typeutil.TestTypeDescriptor.GenericRecord", target.typeName)
    assertEquals(1, target.genericArguments.length)
    assertEquals("Int", target.genericArguments.head.typeName)
  }

  @Test
  def test_TypeDescriptor_Of_WithGenericRecordTypeWithGenericArg_CreatesExpectedRecordTypeDescriptor(): Unit = {
    val target = TypeDescriptor.of[GenericRecord[List[Int]]]
    assertEquals("com.amazon.milan.typeutil.TestTypeDescriptor.GenericRecord", target.typeName)

    assertEquals(1, target.genericArguments.length)

    val genericArg = target.genericArguments.head
    assertEquals("List", genericArg.typeName)

    assertEquals(1, genericArg.genericArguments.length)
    assertEquals("Int", genericArg.genericArguments.head.typeName)
  }

  @Test
  def test_TypeDescriptor_Create_WithTupleType_ReturnsTypeDescriptorWithExpectedGenericArguments(): Unit = {
    val target = TypeDescriptor.forTypeName[Any]("Tuple3[Int, String, Long]")
    assertEquals("Tuple3", target.typeName)
    assertEquals(List(TypeDescriptor.of[Int], TypeDescriptor.of[String], TypeDescriptor.of[Long]), target.genericArguments)
  }

  @Test
  def test_TypeDescriptor_ForTypeName_ThenIsNumeric_WithNumericTypeNames_ReturnsTrue(): Unit = {
    val numericTypeNames = Seq("Double", "Float", "Int", "Long")
    numericTypeNames.foreach(name => assertTrue(TypeDescriptor.forTypeName[Any](name).isNumeric))
  }

  @Test
  def test_TupleTypeDescriptor_JsonSerializerAndDeserialize_ReturnsEquivalentType(): Unit = {
    val original = TypeDescriptor.of[(Int, String)]
    assertTrue(original.isTuple)

    val copy = MilanObjectMapper.copy(original)
    assertTrue(copy.isTuple)
    assertEquals(original, copy)
  }

  @Test
  def test_TypeDescriptor_NamedTupleOf_ReturnsExpectedFields(): Unit = {
    val actual = TypeDescriptor.namedTupleOf[(Int, String, Long)]("a", "b", "c")
    val expectedGenericArgs = List(types.Int, types.String, types.Long)
    val expectedFields = List(
      FieldDescriptor("a", types.Int),
      FieldDescriptor("b", types.String),
      FieldDescriptor("c", types.Long)
    )
    val expected = new TupleTypeDescriptor[(Int, String, Long)]("Tuple3", expectedGenericArgs, expectedFields)
    assertEquals(expected, actual)
  }

  @Test
  def test_TypeDescriptor_OfTuple_DoesntHaveFields(): Unit = {
    val target = TypeDescriptor.of[(Int, String)]
    assertTrue(target.fields.isEmpty)
  }
} 
Example 176
Source File: TestTypeJoiner.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.typeutil

import org.junit.Assert._
import org.junit.Test


@Test
class TestTypeJoiner {
  @Test
  def test_TypeJoiner_WithTwoObjectTypes_ReturnsTupleWithTwoFields(): Unit = {
    val joiner = createTypeJoiner[Int, String]
    val joined = joiner.getOutputType(types.Int, types.String)
    val expected = TypeDescriptor.namedTupleOf[(Int, String)]("left", "right")
    assertEquals(expected, joined)
  }

  @Test
  def test_TypeJoiner_WithTwoTuples_ReturnsTupleWithCombinedFields(): Unit = {
    val joiner = createTypeJoiner[(Int, String), (Long, Double)]

    val leftType = TypeDescriptor.namedTupleOf[(Int, String)]("a", "b")
    val rightType = TypeDescriptor.namedTupleOf[(Long, Double)]("c", "d")
    val joined = joiner.getOutputType(leftType, rightType)

    val expected = TypeDescriptor.namedTupleOf[(Int, String, Long, Double)]("a", "b", "c", "d")
    assertEquals(expected, joined)
  }

  @Test
  def test_TypeJoiner_WithTwoTuples_WithOneNameCollision_AddsPrefixToRightFieldInOutput(): Unit = {
    val joiner = createTypeJoiner[(Int, String), (Long, Double)]

    val leftType = TypeDescriptor.namedTupleOf[(Int, String)]("a", "b")
    val rightType = TypeDescriptor.namedTupleOf[(Long, Double)]("b", "c")
    val joined = joiner.getOutputType(leftType, rightType)

    val expected = TypeDescriptor.namedTupleOf[(Int, String, Long, Double)]("a", "b", "right_b", "c")
    assertEquals(expected, joined)
  }

  @Test
  def test_TypeJoiner_WithLeftObjectAndRightTuple_ReturnsTupleWithCombinedFields(): Unit = {
    val joiner = createTypeJoiner[Int, (Long, Double)]

    val leftType = types.Int
    val rightType = TypeDescriptor.namedTupleOf[(Long, Double)]("c", "d")
    val joined = joiner.getOutputType(leftType, rightType)

    val expected = TypeDescriptor.namedTupleOf[(Int, Long, Double)]("left", "c", "d")
    assertEquals(expected, joined)
  }

  @Test
  def test_TypeJoiner_WithLeftTupleAndRightObject_ReturnsTupleWithCombinedFields(): Unit = {
    val joiner = createTypeJoiner[(Int, String), Double]

    val leftType = TypeDescriptor.namedTupleOf[(Int, String)]("a", "b")
    val rightType = types.Double
    val joined = joiner.getOutputType(leftType, rightType)

    val expected = TypeDescriptor.namedTupleOf[(Int, String, Double)]("a", "b", "right")
    assertEquals(expected, joined)
  }
} 
Example 177
Source File: TestTypeUtil.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.typeutil

import org.junit.Assert.assertEquals
import org.junit.Test


@Test
class TestTypeUtil {
  @Test
  def test_TypeUtil_GetGenericArgumentTypeNames_WithThreeBasicTypes_ReturnsExpectedListOfTypeNames(): Unit = {
    val names = getGenericArgumentTypeNames("(Int, String, Float)")
    assertEquals(List("Int", "String", "Float"), names)
  }

  @Test
  def test_TypeUtil_GetGenericArgumentTypeNames_WithNestedGenericTypes_ReturnsExpectedListOfTypeNames(): Unit = {
    val typeName = "Tuple2[Int, com.amazon.milan.test.Tuple3Record[Int, Int, Int]]"
    val genericArgNames = getGenericArgumentTypeNames(typeName)
    assertEquals(List("Int", "com.amazon.milan.test.Tuple3Record[Int, Int, Int]"), genericArgNames)
  }
} 
Example 178
Source File: TestFlinkGenArgCompare.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.generator

import com.amazon.milan.application.ApplicationConfiguration
import com.amazon.milan.compiler.flink.testing._
import com.amazon.milan.compiler.flink.testutil._
import com.amazon.milan.lang._
import com.amazon.milan.testing.applications._
import org.junit.Assert._
import org.junit.Test


@Test
class TestFlinkGenArgCompare {
  @Test
  def test_FlinkGenArgCompare_MaxBy_OfNonKeyedStream_OutputsLargestRecord(): Unit = {
    val input = Stream.of[IntKeyValueRecord]
    val output = input.maxBy(_.value)

    val graph = new StreamGraph(output)

    // We don't have fine control over the ordering of the data as it flows through and out of the Flink application,
    // but we can be fairly sure that our maxBy operation shouldn't output *all* of the input records if the max record
    // appears early on, so that's what we'll test for.

    val inputRecords =
      List(IntKeyValueRecord(1, 1), IntKeyValueRecord(2, 5)) ++
        List.tabulate(100)(_ => IntKeyValueRecord(3, 3))

    val config = new ApplicationConfiguration
    config.setListSource(input, inputRecords: _*)

    val results = TestApplicationExecutor.executeApplication(graph, config, 60, output)

    val outputRecords = results.getRecords(output)

    // We should prevent at least some of the input records from getting to the output.
    assertTrue(outputRecords.length < inputRecords.length)

    // The max record should appear in the output.
    val maxOutputRecord = outputRecords.maxBy(_.value)
    assertEquals(IntKeyValueRecord(2, 5), maxOutputRecord)
  }

  @Test
  def test_FlinkGenArgCompare_MaxBy_InFlatMapOfGroupBy_OutputsLargestAsLastRecordPerKey(): Unit = {
    val input = Stream.of[IntKeyValueRecord]

    def maxByValue(stream: Stream[IntKeyValueRecord]): Stream[IntKeyValueRecord] = {
      stream.maxBy(r => r.value).last()
    }

    val output = input.groupBy(r => r.key).flatMap((key, group) => maxByValue(group))

    val graph = new StreamGraph(output)

    val data = generateIntKeyValueRecords(100, 5, 100)

    val config = new ApplicationConfiguration
    config.setListSource(input, data: _*)

    val results = TestApplicationExecutor.executeApplication(graph, config, 60, output)

    val outputRecords = results.getRecords(output)
    val lastOutputRecordPerKey = outputRecords.groupBy(_.key).map { case (key, group) => key -> group.last }
    val expectedLastOutputRecordPerKey = data.groupBy(_.key).map { case (key, group) => key -> group.maxBy(_.value) }
    assertEquals(expectedLastOutputRecordPerKey, lastOutputRecordPerKey)
  }
} 
Example 179
Source File: TestFlinkGenerator.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.generator

import com.amazon.milan.application.ApplicationConfiguration
import com.amazon.milan.application.sources.S3DataSource
import com.amazon.milan.dataformats.JsonDataInputFormat
import com.amazon.milan.compiler.flink.testing.{IntRecord, TestApplicationExecutor}
import com.amazon.milan.lang._
import com.amazon.milan.testing.applications._
import org.junit.Assert._
import org.junit.Test

import scala.reflect.runtime.universe
import scala.tools.reflect.ToolBox


@Test
class TestFlinkGenerator {
  private val generator = new FlinkGenerator(GeneratorConfig())

  @Test
  def test_FlinkGenerator_GenerateScala_WithListSourceAndMapOfOneRecord_GeneratesCodeThatCompilesAndOutputsMappedRecord(): Unit = {
    val input = Stream.of[IntRecord].withName("input")
    val output = input.map(r => IntRecord(r.i + 1)).withName("output")

    val graph = new StreamGraph(output)
    val config = new ApplicationConfiguration
    config.setListSource(input, IntRecord(1))

    val result = TestApplicationExecutor.executeApplication(graph, config, 10, output)
    val outputRecords = result.getRecords(output)
    assertEquals(List(IntRecord(2)), outputRecords)
  }

  @Test
  def test_FlinkGenerator_GenerateScala_WithS3DataSource_GeneratesCodeThatCompiles(): Unit = {
    val input = Stream.of[IntRecord].withName("input")
    val output = input.map(r => IntRecord(r.i + 1)).withName("output")

    val graph = new StreamGraph(output)
    val config = new ApplicationConfiguration
    config.setSource(input, new S3DataSource[IntRecord]("bucket", "prefix", new JsonDataInputFormat[IntRecord]()))

    val generatedCode = this.generator.generateScala(graph, config, "", "TestApp")

    this.eval(generatedCode)
  }

  private def eval(code: String): Any = {
    try {
      val tb = ToolBox(universe.runtimeMirror(this.getClass.getClassLoader)).mkToolBox()
      val tree = tb.parse(code)
      tb.eval(tree)
    }
    catch {
      case ex: Throwable =>
        Console.println(code)
        throw ex
    }
  }
} 
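The private eval helper above compiles and runs the generated source in-process using the Scala 2 reflection ToolBox. Below is a minimal sketch of that mechanism on its own (it requires scala-compiler and scala-reflect on the classpath); the expression being evaluated is just a placeholder.

import scala.reflect.runtime.universe
import scala.tools.reflect.ToolBox

object ToolBoxSketch {
  def main(args: Array[String]): Unit = {
    val tb = ToolBox(universe.runtimeMirror(getClass.getClassLoader)).mkToolBox()

    // parse turns source text into a tree; eval compiles and runs it, returning the result.
    val result = tb.eval(tb.parse("List(1, 2, 3).map(_ * 2).sum"))
    println(result) // 12
  }
}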
Example 180
Source File: TestFlinkGenLast.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.generator

import com.amazon.milan.application.ApplicationConfiguration
import com.amazon.milan.compiler.flink.testing._
import com.amazon.milan.lang._
import com.amazon.milan.testing.applications._
import org.junit.Assert._
import org.junit.Test

import scala.util.Random


@Test
class TestFlinkGenLast {
  @Test
  def test_FlinkGenLast_InFlatMapOfGroupBy_WithOneGroupKeyInInputRecords_OutputsOnlyLastInputRecordToOutput(): Unit = {
    val input = Stream.of[IntKeyValueRecord].withName("input")
    val grouped = input.groupBy(r => r.key)

    def maxByValueAndLast(stream: Stream[IntKeyValueRecord]): Stream[IntKeyValueRecord] =
      stream.maxBy(r => r.value).last()

    val output = grouped.flatMap((key, group) => maxByValueAndLast(group)).withName("output")

    val graph = new StreamGraph(output)

    val config = new ApplicationConfiguration
    config.setListSource(input, IntKeyValueRecord(1, 1), IntKeyValueRecord(1, 3), IntKeyValueRecord(1, 2))

    // Keep running until we find records in the output file.
    val results = TestApplicationExecutor.executeApplication(
      graph,
      config,
      20,
      r => r.getRecords(output).isEmpty,
      output)

    val outputRecords = results.getRecords(output)
    assertEquals(List(IntKeyValueRecord(1, 3)), outputRecords)
  }

  @Test
  def test_FlinkGenLast_InFlatMapOfGroupBy_With10GroupKeysInInputRecords_With10RecordsPerGroupKey_OutputsOnlyLastRecordInInputForEachGroupKey(): Unit = {
    val input = Stream.of[IntKeyValueRecord].withName("input")
    val grouped = input.groupBy(r => r.key)

    def maxByValueAndLast(stream: Stream[IntKeyValueRecord]): Stream[IntKeyValueRecord] =
      stream.maxBy(r => r.value).last()

    val output = grouped.flatMap((key, group) => maxByValueAndLast(group)).withName("output")

    val graph = new StreamGraph(output)

    val inputRecords = Random.shuffle(List.tabulate(10)(group => List.tabulate(10)(i => IntKeyValueRecord(group, i))).flatten)
    val config = new ApplicationConfiguration
    config.setListSource(input, inputRecords: _*)

    val results = TestApplicationExecutor.executeApplication(
      graph,
      config,
      20,
      r => r.getRecords(output).length < 10,
      output)

    val outputRecords = results.getRecords(output).sortBy(_.key)
    val expectedOutputRecords = List.tabulate(10)(i => inputRecords.filter(_.key == i).maxBy(_.value))
    assertEquals(expectedOutputRecords, outputRecords)
  }
} 
Example 181
Source File: TestFlinkGenNamedFields.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.generator

import com.amazon.milan.application.ApplicationConfiguration
import com.amazon.milan.compiler.flink.testing.{IntRecord, TestApplicationExecutor}
import com.amazon.milan.lang._
import com.amazon.milan.testing.applications._
import org.junit.Assert._
import org.junit.Test


@Test
class TestFlinkGenNamedFields {
  @Test
  def test_FlinkGenNamedFields_MapWithNamedFieldOutput_OutputsTupleObjects(): Unit = {
    val input = Stream.of[IntRecord]
    val output = input.map(r => fields(
      field("a", r.i + 1),
      field("b", r.i + 2),
      field("c", r.i.toString)
    ))

    val graph = new StreamGraph(output)
    val config = new ApplicationConfiguration
    config.setListSource(input, IntRecord(1))

    val results = TestApplicationExecutor.executeApplication(
      graph,
      config,
      20,
      output)

    val outputRecords = results.getRecords(output)
    val expectedOutputRecords = List((2, 3, "1"))

    assertEquals(expectedOutputRecords, outputRecords)
  }
} 
Example 182
Source File: TestFlinkGenWindowedJoin.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.generator

import com.amazon.milan.compiler.flink.testing.{DateKeyValueRecord, IntKeyValueRecord, IntRecord}
import com.amazon.milan.lang._
import org.junit.Test


object TestFlinkGenWindowedJoin {
  def processWindow(leftRecord: IntRecord, window: Iterable[DateKeyValueRecord]): IntKeyValueRecord = {
    window.find(_.key == leftRecord.i) match {
      case Some(rightRecord) =>
        IntKeyValueRecord(leftRecord.i, rightRecord.value)

      case None =>
        // TODO: Find something better to do.
        throw new Exception("Window is empty.")
    }
  }
}


@Test
class TestFlinkGenWindowedJoin {
  @Test
  def test_FlinkGenWindowedJoin(): Unit = {
    // TODO: Finish this.
    val leftInput = Stream.of[IntRecord]
    val rightInput = Stream.of[DateKeyValueRecord]

    def maxByDateTime(stream: Stream[DateKeyValueRecord]): Stream[DateKeyValueRecord] =
      stream.maxBy(r => r.dateTime)

    val rightWindow = rightInput.groupBy(r => r.key).map((_, group) => maxByDateTime(group))
  }

} 
Example 183
Source File: TestFlinkGenCycle.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.generator

import com.amazon.milan.application.ApplicationConfiguration
import com.amazon.milan.compiler.flink.testing.{IntRecord, TestApplicationExecutor}
import com.amazon.milan.lang._
import com.amazon.milan.testing.applications._
import org.junit.Assert._
import org.junit.Test


@Test
class TestFlinkGenCycle {
  @Test
  def test_FlinkGenCycle_WithLoopThatAddsOneEachTimeAndStopsAtTen_OutputsTenRecords(): Unit = {
    val input = Stream.of[IntRecord]
    val cycle = input.beginCycle()
    val addOne = cycle.map(r => IntRecord(r.i + 1))
    val output = addOne.where(r => r.i <= 10)

    cycle.closeCycle(output)

    val config = new ApplicationConfiguration
    config.setListSource(input, true, IntRecord(1))

    val graph = new StreamGraph(output)

    val results = TestApplicationExecutor.executeApplication(
      graph,
      config,
      60,
      intermediateResults => intermediateResults.getRecords(output).length < 9,
      output)

    val outputRecords = results.getRecords(output).sortBy(_.i)
    val expectedOutputRecords = List.tabulate(9)(i => IntRecord(i + 2))

    assertEquals(expectedOutputRecords, outputRecords)
  }
} 
Example 184
Source File: TestFlinkGenFilter.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.generator

import com.amazon.milan.application.ApplicationConfiguration
import com.amazon.milan.compiler.flink.testing.{IntRecord, TestApplicationExecutor, TwoIntRecord}
import com.amazon.milan.lang._
import com.amazon.milan.testing.applications._
import org.junit.Assert.assertEquals
import org.junit.Test


@Test
class TestFlinkGenFilter {
  @Test
  def test_FlinkGenFilter_WithFilterOnObjectStream_OutputsExpectedRecords(): Unit = {
    val stream = Stream.of[IntRecord]
    val filtered = stream.where(r => r.i == 3)

    val graph = new StreamGraph(filtered)

    val config = new ApplicationConfiguration()
    config.setListSource(stream, IntRecord(1), IntRecord(2), IntRecord(3), IntRecord(4))

    val results = TestApplicationExecutor.executeApplication(graph, config, 60, filtered)

    val actualOutput = results.getRecords(filtered)
    assertEquals(List(IntRecord(3)), actualOutput)
  }

  @Test
  def test_FlinkGenFilter_WithFilterOnTupleStream_OutputsExpectedRecords(): Unit = {
    val stream = Stream.of[TwoIntRecord]
    val tupleStream = stream.map(r => fields(field("a", r.a), field("b", r.b)))
    val filtered = tupleStream.where { case (a, b) => a == b }

    val graph = new StreamGraph(filtered)

    val config = new ApplicationConfiguration()
    config.setListSource(stream, TwoIntRecord(1, 2), TwoIntRecord(2, 2))

    val results = TestApplicationExecutor.executeApplication(graph, config, 60, filtered)

    val actualOutput = results.getRecords(filtered)
    assertEquals(List((2, 2)), actualOutput)
  }
} 
Example 185
Source File: TestScalaTupleTypeInformation.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.types

import java.time.Instant

import com.amazon.milan.compiler.flink.testing._
import com.amazon.milan.compiler.flink.testutil._
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
import org.junit.Assert._
import org.junit.Test


@Test
class TestScalaTupleTypeInformation {
  @Test
  def test_ScalaTupleTypeInformation_WithTupleOfIntAndString_CanSerializeAndDeserializeAndProduceEquivalentObject(): Unit = {
    val typeInfo = new ScalaTupleTypeInformation[(Int, String)](Array(createTypeInformation[Int], createTypeInformation[String]))
    val typeInfoCopy = ObjectStreamUtil.serializeAndDeserialize(typeInfo)
    assertEquals(typeInfo, typeInfoCopy)
  }

  @Test
  def test_ScalaTupleTypeInformation_WithTupleOfComplexTypes_CanSerializeAndDeserializeAndProduceEquivalentObject(): Unit = {
    val typeInfo = new ScalaTupleTypeInformation[(IntRecord, DateKeyValueRecord)](Array(createTypeInformation[IntRecord], createTypeInformation[DateKeyValueRecord]))
    val typeInfoCopy = ObjectStreamUtil.serializeAndDeserialize(typeInfo)
    assertEquals(typeInfo, typeInfoCopy)
  }

  @Test
  def test_ScalaTupleTypeInformation_CreateSerializer_ReturnsSerializerThatCanSerializeAndDeserializeObjects(): Unit = {
    val typeInfo = new ScalaTupleTypeInformation[(Int, String)](Array(createTypeInformation[Int], createTypeInformation[String]))

    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val serializer = typeInfo.createSerializer(env.getConfig)

    val original = (2, "foo")
    val copy = copyWithSerializer(original, serializer)

    assertEquals(original, copy)
  }

  @Test
  def test_ScalaTupleTypeInformation_CreateSerializer_WithComplexTypes_ReturnsSerializerThatCanSerializeAndDeserializeObjects(): Unit = {
    val typeInfo = new ScalaTupleTypeInformation[(IntRecord, DateKeyValueRecord)](Array(createTypeInformation[IntRecord], createTypeInformation[DateKeyValueRecord]))

    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val serializer = typeInfo.createSerializer(env.getConfig)

    val original = (IntRecord(2), DateKeyValueRecord(Instant.now(), 5, 10))
    val copy = copyWithSerializer(original, serializer)

    assertEquals(original, copy)
  }
} 
Example 186
Source File: TestArrayRecordTypeInformation.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.types

import com.amazon.milan.compiler.flink.testutil.ObjectStreamUtil
import org.apache.flink.api.scala._
import org.junit.Assert._
import org.junit.Test


@Test
class TestArrayRecordTypeInformation {
  @Test
  def test_ArrayRecordTypeInformation_CanSerializeAndDeserializeViaObjectStream(): Unit = {
    val original = new ArrayRecordTypeInformation(Array(
      FieldTypeInformation("a", createTypeInformation[Int]),
      FieldTypeInformation("b", createTypeInformation[String])))
    val copy = ObjectStreamUtil.serializeAndDeserialize(original)
    assertEquals(original, copy)
  }
} 
Example 187
Source File: TestPriorityQueueTypeSerializer.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.types

import com.amazon.milan.compiler.flink.runtime.SequenceNumberOrdering
import com.amazon.milan.compiler.flink.testing.IntRecord
import com.amazon.milan.compiler.flink.testutil._
import com.amazon.milan.compiler.flink.types
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
import org.junit.Assert._
import org.junit.Test

import scala.collection.mutable
import scala.util.Random


@Test
class TestPriorityQueueTypeSerializer {
  @Test
  def test_PriorityQueueTypeSerializer_Deserialize_WithQueueOfInt_With100RandomItems_ReturnsQueueThatYieldsSameItemsAsOriginal(): Unit = {
    val typeInfo = new PriorityQueueTypeInformation[Int](createTypeInformation[Int], Ordering.Int)
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val serializer = typeInfo.createSerializer(env.getConfig)

    val original = new mutable.PriorityQueue[Int]()
    val rand = new Random(0)
    val values = List.tabulate(100)(_ => rand.nextInt(100))
    original.enqueue(values: _*)

    val copy = copyWithSerializer(original, serializer)

    assertEquals(original.length, copy.length)
    assertEquals(original.dequeueAll.toList, copy.dequeueAll.toList)
  }

  @Test
  def test_PriorityQueueTypeSerializer_Deserialize_AfterRestoring_WithQueueOfInt_With100RandomItems_ReturnsQueueThatYieldsSameItemsAsOriginal(): Unit = {
    val typeInfo = new PriorityQueueTypeInformation[Int](createTypeInformation[Int], Ordering.Int)
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val serializer = typeInfo.createSerializer(env.getConfig)

    val snapshot = serializer.snapshotConfiguration()
    val snapshotCopy = new types.PriorityQueueTypeSerializer.Snapshot[Int]()
    copyData(snapshot.writeSnapshot, input => snapshotCopy.readSnapshot(snapshot.getCurrentVersion, input, getClass.getClassLoader))

    val serializerCopy = snapshotCopy.restoreSerializer()

    val original = new mutable.PriorityQueue[Int]()
    val rand = new Random(0)
    val values = List.tabulate(100)(_ => rand.nextInt(100))
    original.enqueue(values: _*)

    val copy = copyData(
      output => serializer.serialize(original, output),
      input => serializerCopy.deserialize(input))

    assertEquals(original.length, copy.length)
    assertEquals(original.dequeueAll.toList, copy.dequeueAll.toList)
  }

  @Test
  def test_PriorityQueueTypeSerializer_Deserialize_AfterRestoring_WithQueueOfRecordWrapperAndSequenceNumberOrdering_With100RandomItems_ReturnsQueueThatYieldsSameItemsAsOriginal(): Unit = {
    val ordering = new SequenceNumberOrdering[IntRecord, Product]
    val typeInfo = new PriorityQueueTypeInformation[RecordWrapper[IntRecord, Product]](
      RecordWrapperTypeInformation.wrap(createTypeInformation[IntRecord]),
      ordering)
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val serializer = typeInfo.createSerializer(env.getConfig)

    val snapshot = serializer.snapshotConfiguration()
    val snapshotCopy = new types.PriorityQueueTypeSerializer.Snapshot[RecordWrapper[IntRecord, Product]]()
    copyData(snapshot.writeSnapshot, input => snapshotCopy.readSnapshot(snapshot.getCurrentVersion, input, getClass.getClassLoader))

    val serializerCopy = snapshotCopy.restoreSerializer()

    val original = new mutable.PriorityQueue[RecordWrapper[IntRecord, Product]]()(ordering)
    val rand = new Random(0)
    val values = List.tabulate(100)(i => RecordWrapper.wrap(IntRecord(rand.nextInt(100)), i.toLong))
    original.enqueue(values: _*)

    val copy = copyData(
      output => serializer.serialize(original, output),
      input => serializerCopy.deserialize(input))

    assertEquals(original.length, copy.length)
    assertEquals(original.dequeueAll.toList, copy.dequeueAll.toList)
  }
} 
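The assertions above compare the result of dequeueAll rather than the queues themselves, since draining a scala.collection.mutable.PriorityQueue is what exposes its ordering. A small standard-library sketch of that behavior, unrelated to the Flink serializers:

import scala.collection.mutable

object PriorityQueueSketch {
  def main(args: Array[String]): Unit = {
    // mutable.PriorityQueue is a max-heap for the implicit Ordering (here Ordering.Int).
    val q = mutable.PriorityQueue.empty[Int]
    q.enqueue(3, 1, 2)

    // dequeueAll drains the queue and returns the elements in priority order.
    println(q.dequeueAll.toList) // List(3, 2, 1)
    println(q.isEmpty)           // true
  }
}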
Example 188
Source File: TestTreeArgumentSplitter.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.internal

import com.amazon.milan.program.Tree
import org.junit.Assert._
import org.junit.Test


object TestTreeArgumentSplitter {

  case class Record(i: Int)

  def functionOfRecord(r: Record): Boolean = true

  def functionOfInt(i: Int): Boolean = true
}

import com.amazon.milan.compiler.flink.internal.TestTreeArgumentSplitter._


@Test
class TestTreeArgumentSplitter {
  @Test
  def test_TreeArgumentSplitter_SplitTree_WithTwoInputRecordsAndOneEqualsStatementForEachRecord_SplitsTheExpression(): Unit = {
    val tree = Tree.fromExpression((a: Record, b: Record) => a.i == 1 && b.i == 2)
    val result = TreeArgumentSplitter.splitTree(tree, "a")
    assertEquals(Some(Tree.fromExpression((a: Record, b: Record) => a.i == 1)), result.extracted)
    assertEquals(Some(Tree.fromExpression((a: Record, b: Record) => b.i == 2)), result.remainder)
  }

  @Test
  def test_TreeArgumentSplitter_SplitTree_WithTwoInputRecordsAndOneFunctionCallForEachRecord_SplitsTheExpression(): Unit = {
    val tree = Tree.fromExpression((a: Record, b: Record) => functionOfRecord(a) && functionOfInt(b.i))
    val result = TreeArgumentSplitter.splitTree(tree, "a")
    assertEquals(Some(Tree.fromExpression((a: Record, b: Record) => functionOfRecord(a))), result.extracted)
    assertEquals(Some(Tree.fromExpression((a: Record, b: Record) => functionOfInt(b.i))), result.remainder)
  }
} 
Example 189
Source File: TestJoinKeyExpressionExtractor.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.internal

import com.amazon.milan.program.Tree
import org.junit.Assert._
import org.junit.Test


object TestJoinKeyExpressionExtractor {

  case class Record(a: Int, b: String, c: Long, d: String)

  def functionOfRecord(r: Record): Boolean = true
}

import com.amazon.milan.compiler.flink.internal.TestJoinKeyExpressionExtractor._


@Test
class TestJoinKeyExpressionExtractor {
  @Test
  def test_JoinKeyExpressionExtractor_ExtractJoinKeyExpression_WithSingleFieldEqualityCheck_ReturnsInputExpressionAsExtractedAndNoRemainder(): Unit = {
    val tree = Tree.fromExpression((l: Record, r: Record) => l.a == r.a)
    val result = JoinKeyExpressionExtractor.extractJoinKeyExpression(tree)
    assertEquals(Some(tree), result.extracted)
    assertTrue(result.remainder.isEmpty)
  }

  @Test
  def test_JoinKeyExpressionExtractor_ExtractJoinKeyExpression_WithTwoFieldEqualityChecks_ReturnsInputExpressionAsExtractedAndNoRemainder(): Unit = {
    val tree = Tree.fromExpression((l: Record, r: Record) => (l.a == r.a) && (l.b == r.b))
    val result = JoinKeyExpressionExtractor.extractJoinKeyExpression(tree)
    assertEquals(Some(tree), result.extracted)
    assertTrue(result.remainder.isEmpty)
  }

  @Test
  def test_JoinKeyExpressionExtractor_ExtractJoinKeyExpression_WithFourFieldEqualityChecks_ReturnsInputExpressionAsExtractedAndNoRemainder(): Unit = {
    val tree = Tree.fromExpression((l: Record, r: Record) => (l.a == r.a) && (l.b == r.b) && (l.c == r.c) && (l.d == r.d))
    val result = JoinKeyExpressionExtractor.extractJoinKeyExpression(tree)
    assertEquals(Some(tree), result.extracted)
    assertTrue(result.remainder.isEmpty)
  }

  @Test
  def test_JoinKeyExpressionExtractor_ExtractJoinKeyExpression_WithFieldEqualityCheckAndFunctionOfOneInput_ReturnsEqualityCheckAsExtractedAndFunctionCallAsRemainder(): Unit = {
    val tree = Tree.fromExpression((l: Record, r: Record) => (l.a == r.a) && functionOfRecord(l))
    val result = JoinKeyExpressionExtractor.extractJoinKeyExpression(tree)
    assertEquals(Some(Tree.fromExpression((l: Record, r: Record) => l.a == r.a)), result.extracted)
    assertEquals(Some(Tree.fromExpression((l: Record, r: Record) => functionOfRecord(l))), result.remainder)
  }
} 
Example 190
Source File: TestContextualTreeTransformer.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.internal

import com.amazon.milan.compiler.flink.testing.{IntRecord, KeyValueRecord, StringRecord}
import com.amazon.milan.program._
import com.amazon.milan.typeutil.{TypeDescriptor, types}
import org.junit.Assert._
import org.junit.Test


@Test
class TestContextualTreeTransformer {
  @Test
  def test_ContextualTreeTransformer_Transform_ForFunctionWithOneArgumentThatReturnsThatArgument_ReturnsIdenticalFunction(): Unit = {
    val function = FunctionDef(List(ValueDef("x", TypeDescriptor.of[IntRecord])), SelectTerm("x"))
    val transformed = ContextualTreeTransformer.transform(function)
    assertEquals(function, transformed)
  }

  @Test
  def test_ContextualTreeTransformer_Transform_ForFunctionWithOneTupleArgumentWithUnpackToTwoValues_ReturnsFunctionWithSelectFieldInsteadOfUnpack(): Unit = {
    val inputType = TypeDescriptor.createNamedTuple[(Int, Long)](List(("intField", TypeDescriptor.of[Int]), ("longField", TypeDescriptor.of[Long])))
    val function = FunctionDef(List(ValueDef("x", inputType)), Unpack(SelectTerm("x"), List("a", "b"), SelectTerm("b")))
    val transformed = ContextualTreeTransformer.transform(function)
    val FunctionDef(List(ValueDef("x", _)), SelectField(SelectTerm("x"), "longField")) = transformed
  }

  @Test
  def test_ContextualTreeTransformer_Transform_ForFunctionWithOneObjectAndOneTupleArgumentThatUnpacksTheTupleArgument_ReturnsFunctionWithUnpackRemoved(): Unit = {
    val input1 = TypeDescriptor.of[IntRecord]
    val input2 = TypeDescriptor.createNamedTuple[(IntRecord, StringRecord)](List(("first", TypeDescriptor.of[IntRecord]), ("second", TypeDescriptor.of[StringRecord])))
    val function = FunctionDef(List(ValueDef("a", input1), ValueDef("b", input2)), Unpack(SelectTerm("b"), List("b1", "b2"), Equals(SelectField(SelectTerm("a"), "i"), SelectField(SelectTerm("b1"), "i"))))

    val transformed = ContextualTreeTransformer.transform(function)
    val FunctionDef(List(ValueDef("a", _), ValueDef("b", _)), Equals(SelectField(SelectTerm("a"), "i"), SelectField(SelectField(SelectTerm("b"), "first"), "i"))) = transformed
  }

  @Test
  def test_ContextualTreeTransformer_Transform_WithTwoArgumentFunction_WithOneRecordAndOneTupleArgument_ThatUnpacksTupleArgumentAndUsesAllFields_ReturnsExpectedTree(): Unit = {
    val inputTypes = List(
      TypeDescriptor.of[IntRecord],
      TypeDescriptor.createNamedTuple[(IntRecord, IntRecord)](List(("a", TypeDescriptor.of[IntRecord]), ("b", TypeDescriptor.of[IntRecord]))))
    val function = Tree.fromFunction((c: KeyValueRecord, ab: (KeyValueRecord, KeyValueRecord)) => ab match {
      case (a, b) => a != null && b != null && c != null
    })
      .withArgumentTypes(inputTypes)

    val transformed = ContextualTreeTransformer.transform(function)
    val FunctionDef(List(ValueDef("c", _), ValueDef("ab", _)), And(And(Not(IsNull(SelectField(SelectTerm("ab"), "a"))), Not(IsNull(SelectField(SelectTerm("ab"), "b")))), Not(IsNull(SelectTerm("c"))))) = transformed
  }

  @Test
  def test_ContextualTreeTransformer_Transform_WithTupleArgumentWithoutFieldNamesAndSelectTermFromUnpack_ReturnsTreeUsingFlinkTupleField(): Unit = {
    val inputTypes = List(TypeDescriptor.createTuple[(Int, String)](List(types.Int, types.String)))
    val function = Tree.fromFunction((t: (Int, String)) => t match {
      case (i, s) => i
    })
      .withArgumentTypes(inputTypes)

    TypeChecker.typeCheck(function)

    val transformed = ContextualTreeTransformer.transform(function)
    val FunctionDef(List(ValueDef("t", _)), TupleElement(SelectTerm("t"), 0)) = transformed
  }
} 
Example 191
Source File: TestJoinPreconditionExtractor.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.internal

import com.amazon.milan.program.Tree
import org.junit.Assert._
import org.junit.Test


object TestJoinPreconditionExtractor {

  case class Record(i: Int)

}

import com.amazon.milan.compiler.flink.internal.TestJoinPreconditionExtractor._


@Test
class TestJoinPreconditionExtractor {
  @Test
  def test_JoinPreconditionExtractor_ExtractJoinPrecondition_WithNotNullExpression_ReturnsInputExpressionAsRemainder(): Unit = {
    val tree = Tree.fromExpression((r: Record) => r != null)
    val result = JoinPreconditionExtractor.extractJoinPrecondition(tree)
    assertTrue(result.extracted.isEmpty)
    assertEquals(Some(tree), result.remainder)
  }

  @Test
  def test_JoinPreconditionExtractor_ExtractJoinPrecondition_WithFieldEqualityExpression_ReturnsInputExpressionAsRemainder(): Unit = {
    val tree = Tree.fromExpression((a: Record, b: Record) => a.i == b.i)
    val result = JoinPreconditionExtractor.extractJoinPrecondition(tree)
    assertTrue(result.extracted.isEmpty)
    assertEquals(Some(tree), result.remainder)
  }

  @Test
  def test_JoinPreconditionExtractor_ExtractJoinPrecondition_WithMixedExpression_ReturnsExpectedOutputs(): Unit = {
    val tree = Tree.fromExpression((a: Record, b: Record) => (a != null) && (a.i == b.i) && (a.i == 2) && (b.i == 3))
    val result = JoinPreconditionExtractor.extractJoinPrecondition(tree)
    assertEquals(Some(Tree.fromExpression((a: Record, b: Record) => (a.i == 2) && (b.i == 3))), result.extracted)
    assertEquals(Some(Tree.fromExpression((a: Record, b: Record) => (a != null) && (a.i == b.i))), result.remainder)
  }
} 
Example 192
Source File: TestKeySelectorExtractor.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.internal

import com.amazon.milan.compiler.flink.testing.TwoIntRecord
import com.amazon.milan.program.{ConstantValue, FunctionDef, Minus, Plus, SelectField, SelectTerm, Tree, Tuple, ValueDef}
import org.junit.Test


@Test
class TestKeySelectorExtractor {
  @Test
  def test_KeySelectorExtractor_GetKeyTupleFunctions_WithTwoEqualityConditions_ReturnsFunctionsThatProduceExpectedTuples(): Unit = {
    val func = Tree.fromExpression((x: TwoIntRecord, y: TwoIntRecord) => x.a == y.a && x.b + 1 == y.b - 1).asInstanceOf[FunctionDef]
    val (leftFunc, rightFunc) = KeySelectorExtractor.getKeyTupleFunctions(func)

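    // Each side's key function should select the fields compared in the join condition, keeping the arithmetic on its own side.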
    val FunctionDef(List(ValueDef("x", _)), Tuple(List(SelectField(SelectTerm("x"), "a"), Plus(SelectField(SelectTerm("x"), "b"), ConstantValue(1, _))))) = leftFunc
    val FunctionDef(List(ValueDef("y", _)), Tuple(List(SelectField(SelectTerm("y"), "a"), Minus(SelectField(SelectTerm("y"), "b"), ConstantValue(1, _))))) = rightFunc
  }
} 
Example 193
Source File: TestAggregateFunctionTreeExtractor.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.internal

import com.amazon.milan.compiler.flink.testing.TwoIntRecord
import com.amazon.milan.lang.aggregation._
import com.amazon.milan.program._
import com.amazon.milan.typeutil.{createTypeDescriptor, types}
import org.junit.Assert._
import org.junit.Test


@Test
class TestAggregateFunctionTreeExtractor {
  @Test
  def test_AggregateFunctionTreeExtractor_GetAggregateInputFunctions_WithAggregateFunctionMeanMinusMin_ReturnsTwoFunctionsOfTheAggregateFunctionArguments(): Unit = {
    val function = Tree.fromExpression((key: Int, r: TwoIntRecord) => mean(r.a) - min(r.b)).asInstanceOf[FunctionDef]
    TypeChecker.typeCheck(function, List(types.Int, createTypeDescriptor[TwoIntRecord]))

    val functions = AggregateFunctionTreeExtractor.getAggregateInputFunctions(function)
    assertEquals(2, functions.length)

    // These will throw an exception if the pattern doesn't match.
    val FunctionDef(List(ValueDef("r", _)), SelectField(SelectTerm("r"), "a")) = functions.head
    val FunctionDef(List(ValueDef("r", _)), SelectField(SelectTerm("r"), "b")) = functions.last
  }

  @Test
  def test_AggregateFunctionTreeExtractor_GetResultTupleToOutputFunction_WithAggregateFunctionMeanMinusMin_ReturnsFunctionOfResultTupleThatSubtractsTheTupleElements(): Unit = {
    val function = Tree.fromExpression((key: Int, r: TwoIntRecord) => mean(r.a) - min(r.b)).asInstanceOf[FunctionDef]
    TypeChecker.typeCheck(function, List(types.Int, createTypeDescriptor[TwoIntRecord]))

    val output = AggregateFunctionTreeExtractor.getResultTupleToOutputFunction(function)
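    // The output function should subtract the two aggregate results held in the result tuple: mean minus min.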
    val FunctionDef(List(ValueDef("key", _), ValueDef("result", _)), Minus(TupleElement(SelectTerm("result"), 0), TupleElement(SelectTerm("result"), 1))) = output
  }

  @Test
  def test_AggregateFunctionTreeExtractor_GetAggregateExpressions_WithAggregateFunctionMeanMinusMin_ReturnsExpressionsForMeanAndMin(): Unit = {
    val function = Tree.fromExpression((key: Int, r: TwoIntRecord) => mean(r.a) - min(r.b)).asInstanceOf[FunctionDef]
    val exprs = AggregateFunctionTreeExtractor.getAggregateExpressions(function)
    assertTrue(exprs.exists(_.isInstanceOf[Mean]))
    assertTrue(exprs.exists(_.isInstanceOf[Min]))
  }

  @Test
  def test_AggregateFunctionTreeExtractor_GetResultTupleToOutputFunction_WithFunctionContainingSumPlusKey_ReturnsExpectedFunctionOfKeyAndResult(): Unit = {
    val function = Tree.fromExpression((key: Int, r: TwoIntRecord) => sum(r.a) + key).asInstanceOf[FunctionDef]
    TypeChecker.typeCheck(function, List(types.Int, createTypeDescriptor[TwoIntRecord]))

    val mapFunc = AggregateFunctionTreeExtractor.getResultTupleToOutputFunction(function)
    val FunctionDef(List(ValueDef("key", _), ValueDef("result", _)), Plus(TupleElement(SelectTerm("result"), 0), SelectTerm("key"))) = mapFunc
  }

  @Test(expected = classOf[InvalidProgramException])
  def test_AggregateFunctionTreeExtractor_GetAggregateInputFunctions_WithKeyUsedInsideAggregateFunctionArgument_ThrowsInvalidProgramException(): Unit = {
    val function = Tree.fromExpression((key: Int, r: Int) => sum(key)).asInstanceOf[FunctionDef]
    TypeChecker.typeCheck(function, List(types.Int, types.Int))

    AggregateFunctionTreeExtractor.getAggregateInputFunctions(function)
  }
} 
Example 194
Source File: TestFlinkScalarFunctionGenerator.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.internal

import com.amazon.milan.compiler.flink.testing.IntRecord
import com.amazon.milan.compiler.flink.typeutil._
import com.amazon.milan.program._
import com.amazon.milan.typeutil.{TypeDescriptor, types}
import org.junit.Assert._
import org.junit.Test


@Test
class TestFlinkScalarFunctionGenerator {
  @Test
  def test_FlinkScalarFunctionGenerator_WithTwoTupleRecordInputs_ReturnsExpectedCode(): Unit = {
    val input1 = TypeDescriptor.createNamedTuple[(Int, String)](List(("i", types.Int), ("s", types.String))).toTupleRecord
    val input2 = TypeDescriptor.createNamedTuple[(Int, String)](List(("i", types.Int), ("s", types.String))).toTupleRecord
    val tree = FunctionDef(
      List(ValueDef("a", input1), ValueDef("b", input2)),
      ApplyFunction(
        FunctionReference("FakeType", "fakeFunction"),
        List(SelectField(SelectTerm("a"), "i"), SelectField(SelectTerm("b"), "s")),
        types.String))

    val code = FlinkScalarFunctionGenerator.default.getScalaAnonymousFunction(tree)
    val expectedCode = "(a: com.amazon.milan.compiler.flink.types.ArrayRecord, b: com.amazon.milan.compiler.flink.types.ArrayRecord) => FakeType.fakeFunction(a(0).asInstanceOf[Int], b(1).asInstanceOf[String])"
    assertEquals(expectedCode, code)
  }

  @Test
  def test_FlinkScalarFunctionGenerator_WithOneObjectAndOneTupleRecordInput_ReturnsExpectedCode(): Unit = {
    val input1 = TypeDescriptor.of[IntRecord]
    val input2 = TypeDescriptor.createNamedTuple[(Int, String)](List(("i", types.Int), ("s", types.String))).toTupleRecord
    val tree = FunctionDef(
      List(ValueDef("a", input1), ValueDef("b", input2)),
      ApplyFunction(
        FunctionReference("FakeType", "fakeFunction"),
        List(SelectField(SelectTerm("a"), "i"), SelectField(SelectTerm("b"), "s")),
        types.String))

    val code = FlinkScalarFunctionGenerator.default.getScalaAnonymousFunction(tree)
    val expectedCode = "(a: com.amazon.milan.compiler.flink.testing.IntRecord, b: com.amazon.milan.compiler.flink.types.ArrayRecord) => FakeType.fakeFunction(a.i, b(1).asInstanceOf[String])"
    assertEquals(expectedCode, code)
  }

  @Test
  def test_FlinkScalarFunctionGenerator_WithUnpackOfTuple_ReturnsCodeUsingTupleField(): Unit = {
    val inputType = TypeDescriptor.createTuple[(Int, String)](List(types.Int, types.String))
    val tree = FunctionDef(List(ValueDef("t", inputType)), Unpack(SelectTerm("t"), List("i", "s"), SelectTerm("i")))
    TypeChecker.typeCheck(tree)

    val code = FlinkScalarFunctionGenerator.default.getScalaAnonymousFunction(tree)
    val expectedCode = "(t: Tuple2[Int, String]) => t._1"
    assertEquals(expectedCode, code)
  }
} 
Example 195
Source File: TestScanProcessFunctions.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.runtime

import com.amazon.milan.compiler.flink.testing.{SingletonMemorySinkFunction, _}
import com.amazon.milan.compiler.flink.testutil._
import com.amazon.milan.compiler.flink.types.{NoneTypeInformation, RecordWrapper}
import org.apache.flink.api.scala._
import org.junit.Assert._
import org.junit.Test

import scala.collection.JavaConverters._


@Test
class TestScanProcessFunctions {
  @Test
  def test_ScanKeyedProcessFunction_ThatPerformsRollingSum_OutputsExpectedRecords(): Unit = {
    val function = new ScanKeyedProcessFunction[IntKeyValueRecord, Tuple1[Int], Int, IntKeyValueRecord](
      0,
      createTypeInformation[Tuple1[Int]],
      createTypeInformation[Int],
      createTypeInformation[IntKeyValueRecord]) {

      override protected def process(state: Int, key: Tuple1[Int], value: IntKeyValueRecord): (Int, Option[IntKeyValueRecord]) = {
        val sum = state + value.value
        (sum, Some(IntKeyValueRecord(key._1, sum)))
      }
    }

    val data = generateIntKeyValueRecords(1000, 10, 100)

    val env = getTestExecutionEnvironment

    val input = env.fromCollection(data.asJavaCollection, createTypeInformation[IntKeyValueRecord]).wrap(createTypeInformation[IntKeyValueRecord])

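    // Re-key each wrapped record by its integer key so the scan keeps a separate rolling sum per key.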
    val keySelector = new RecordWrapperKeySelector[IntKeyValueRecord, Tuple1[Int]](createTypeInformation[Tuple1[Int]])
    val keyed =
      input
        .map(new ModifyRecordKeyMapFunction[IntKeyValueRecord, Product, Tuple1[Int]](createTypeInformation[IntKeyValueRecord], createTypeInformation[Tuple1[Int]]) {
          override protected def getNewKey(value: IntKeyValueRecord, key: Product): Tuple1[Int] = Tuple1(value.key)
        })
        .keyBy(keySelector, keySelector.getKeyType)

    val output = keyed.process(function).unwrap(createTypeInformation[IntKeyValueRecord])

    val sink = new SingletonMemorySinkFunction[IntKeyValueRecord]()
    output.addSink(sink)

    env.executeThenWaitFor(() => sink.getRecordCount == 1000, 5)

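    // The last record emitted for each key should equal the total of that key's values.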
    val actualOutput = sink.getValues
    val actualFinalRecords = actualOutput.groupBy(_.key).map { case (_, g) => g.last }.toList.sortBy(_.key)
    val expectedFinalRecords = data.groupBy(_.key).map { case (k, g) => IntKeyValueRecord(k, g.map(_.value).sum) }.toList.sortBy(_.key)

    assertEquals(expectedFinalRecords, actualFinalRecords)
  }

  @Test
  def test_ScanProcessFunction_ThatPerformsRollingSum_OutputsExpectedFinalValue(): Unit = {
    val function = new ScanProcessFunction[IntKeyValueRecord, Product, Int, IntRecord](
      0,
      NoneTypeInformation.instance,
      createTypeInformation[Int],
      createTypeInformation[IntRecord]) {
      override protected def process(state: Int, value: IntKeyValueRecord): (Int, Option[IntRecord]) = {
        val sum = state + value.value
        (sum, Some(IntRecord(sum)))
      }
    }

    val recordCount = 10
    val data = generateIntKeyValueRecords(recordCount, 10, 100)

    val env = getTestExecutionEnvironment

    val input = env.fromCollection(data.asJavaCollection, createTypeInformation[IntKeyValueRecord]).wrap(createTypeInformation[IntKeyValueRecord])
    val output = input.process(function).setParallelism(1)

    val sink = new SingletonMemorySinkFunction[RecordWrapper[IntRecord, Product]]()
    output.addSink(sink)

    env.executeThenWaitFor(() => sink.getRecordCount == recordCount, 5)

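    // With parallelism 1, the record with the highest sequence number is the final rolling sum.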
    val actualOutput = sink.getValues
    val actualFinalRecord = actualOutput.maxBy(_.sequenceNumber).value
    val expectedFinalRecord = IntRecord(data.map(_.value).sum)

    assertEquals(expectedFinalRecord, actualFinalRecord)
  }
} 
Example 196
Source File: TestFileSinkFunction.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.runtime

import java.nio.file.Files

import com.amazon.milan.dataformats.JsonDataOutputFormat
import com.amazon.milan.typeutil._
import org.junit.Assert._
import org.junit.Test

import scala.collection.JavaConverters._


@Test
class TestFileSinkFunction {
  @Test
  def test_FileSinkFunction_WithJsonDataFormat_WithTupleRecordType_WritesOneRecordPerLine(): Unit = {
    val tempFile = Files.createTempFile("test_FileSinkFunction_", ".txt")
    try {
      val dataFormat = new JsonDataOutputFormat[(Int, String)]
      val sinkFunction = new FileSinkFunction[(Int, String)](tempFile.toString, dataFormat)

      sinkFunction.invoke((1, "1"), null)
      sinkFunction.invoke((2, "2"), null)
      sinkFunction.invoke((3, "3"), null)

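      // Three invocations with the JSON output format should produce three lines in the file.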
      val contents = Files.readAllLines(tempFile).asScala.toList
      assertEquals(3, contents.length)
    }
    finally {
      Files.deleteIfExists(tempFile)
    }
  }
} 
Example 197
Source File: TestKeyedLastByOperator.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.flink.runtime

import com.amazon.milan.compiler.flink.testing.{SingletonMemorySinkFunction, _}
import com.amazon.milan.compiler.flink.testutil._
import com.amazon.milan.compiler.flink.types.RecordWrapper
import org.apache.flink.api.scala._
import org.junit.Assert._
import org.junit.Test

import scala.collection.JavaConverters._
import scala.util.Random


@Test
class TestKeyedLastByOperator {
  @Test
  def test_KeyedLastByOperator_WithRandomInputsWithTenKeys_ReturnsOneRecordPerKeyWithMaxValue(): Unit = {
    val operator: KeyedLastByOperator[IntKeyValueRecord, Tuple1[Int]] = new KeyedLastByOperator[IntKeyValueRecord, Tuple1[Int]](createTypeInformation[IntKeyValueRecord], createTypeInformation[Tuple1[Int]]) {
      override protected def takeNewValue(newRecord: RecordWrapper[IntKeyValueRecord, Tuple1[Int]], currentRecord: RecordWrapper[IntKeyValueRecord, Tuple1[Int]]): Boolean = {
        newRecord.value.value > currentRecord.value.value
      }
    }

    val rand = new Random(0)
    val data = List.tabulate(1000)(_ => {
      IntKeyValueRecord(rand.nextInt(10), rand.nextInt(100))
    })

    val env = getTestExecutionEnvironment

    val input = env.fromCollection(data.asJavaCollection, createTypeInformation[IntKeyValueRecord]).wrap(createTypeInformation[IntKeyValueRecord])

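    // Key the stream by record key so the operator retains the highest-value record seen for each key.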
    val keySelector = new RecordWrapperKeySelector[IntKeyValueRecord, Tuple1[Int]](createTypeInformation[Tuple1[Int]])
    val keyed =
      input
        .map(new ModifyRecordKeyMapFunction[IntKeyValueRecord, Product, Tuple1[Int]](createTypeInformation[IntKeyValueRecord], createTypeInformation[Tuple1[Int]]) {
          override protected def getNewKey(value: IntKeyValueRecord, key: Product): Tuple1[Int] = Tuple1(value.key)
        })
        .keyBy(keySelector, keySelector.getKeyType)

    val output = keyed.transform(
      "op",
      operator.getProducedType,
      operator)
      .unwrap()

    val sink = new SingletonMemorySinkFunction[IntKeyValueRecord]()
    output.addSink(sink)

    env.executeThenWaitFor(() => sink.getRecordCount >= 10, 5)

    val expectedOutput = data.groupBy(_.key).map { case (_, g) => g.maxBy(_.value) }.toList.sortBy(_.key)
    val actualOutput = sink.getValues.sortBy(_.key)

    assertEquals(expectedOutput, actualOutput)
  }
} 
Example 198
Source File: TestStreamAppMap.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.scala

import com.amazon.milan.compiler.scala.testing.{IntRecord, KeyValueRecord}
import com.amazon.milan.lang
import org.junit.Assert._
import org.junit.Test


@Test
class TestStreamAppMap {
  @Test
  def test_StreamAppMap_OfDataStream_MapsRecords(): Unit = {
    val inputStream = lang.Stream.of[IntRecord].withId("input")
    val outputStream = inputStream.map(r => IntRecord(r.i + 1)).withId("output")

    val compiledFunction = StreamAppTester.compile(outputStream)

    val output = compiledFunction(Stream(IntRecord(1), IntRecord(2))).toList
    assertEquals(List(IntRecord(2), IntRecord(3)), output)
  }

  @Test
  def test_StreamAppMap_ThenFlatMap_OfGroupedStream_MapsGroups(): Unit = {
    val input = lang.Stream.of[KeyValueRecord].withId("input")
    val grouped = input.groupBy(r => r.key)

    def addOne(s: lang.Stream[KeyValueRecord]): lang.Stream[KeyValueRecord] =
      s.map(r => KeyValueRecord(r.key, r.value + 1))

    def sumByValue(s: lang.Stream[KeyValueRecord]): lang.Stream[KeyValueRecord] =
      s.sumBy(r => r.value, (r, sum) => KeyValueRecord(r.key, sum))

    val mapped = grouped.map((_, group) => addOne(group)).withId("mapped")
    val output = mapped.flatMap((_, group) => sumByValue(group)).withId("output")

    val compiledFunction = StreamAppTester.compile(output)

    val inputRecords = KeyValueRecord.generate(100, 5, 10)
    val outputRecords = compiledFunction(inputRecords.toStream).toList

    assertEquals(inputRecords.length, outputRecords.length)

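    // The last output per key is the running sum of the incremented (value + 1) records for that key.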
    val lastOutputRecords = outputRecords.groupBy(_.key).map { case (_, g) => g.last }.toList.sortBy(_.key)

    val expectedLastOutputRecords =
      inputRecords
        .groupBy(_.key)
        .map { case (k, g) => k -> g.map(r => KeyValueRecord(r.key, r.value + 1)) }
        .map { case (k, g) => KeyValueRecord(k, g.map(_.value).sum) }
        .toList
        .sortBy(_.key)

    assertEquals(expectedLastOutputRecords, lastOutputRecords)
  }
} 
Example 199
Source File: TestScalarFunctionGenerator.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.scala

import com.amazon.milan.program.{ApplyFunction, CreateInstance, FunctionDef, FunctionReference, SelectField, SelectTerm, TypeChecker, Unpack, ValueDef}
import com.amazon.milan.typeutil.{TypeDescriptor, createTypeDescriptor, types}
import org.junit.Assert.assertEquals
import org.junit.Test


object TestScalarFunctionGenerator {

  case class IntRecord(i: Int)

}

import com.amazon.milan.compiler.scala.TestScalarFunctionGenerator._


@Test
class TestScalarFunctionGenerator {
  @Test
  def test_ScalarFunctionGenerator_WithTwoObjectInputs_ReturnsExpectedCode(): Unit = {
    val input1 = TypeDescriptor.of[IntRecord]
    val input2 = TypeDescriptor.of[IntRecord]
    val tree = FunctionDef(
      List(ValueDef("a", input1), ValueDef("b", input2)),
      ApplyFunction(
        FunctionReference("FakeType", "fakeFunction"),
        List(SelectField(SelectTerm("a"), "i"), SelectField(SelectTerm("b"), "i")),
        types.String))

    val code = ScalarFunctionGenerator.default.getScalaAnonymousFunction(tree)
    val expectedCode = "(a: com.amazon.milan.compiler.scala.TestScalarFunctionGenerator.IntRecord, b: com.amazon.milan.compiler.scala.TestScalarFunctionGenerator.IntRecord) => FakeType.fakeFunction(a.i, b.i)"
    assertEquals(expectedCode, code)
  }

  @Test
  def test_ScalarFunctionGenerator_WithCreateInstance_ReturnsExpectedCode(): Unit = {
    val tree = FunctionDef(
      List(ValueDef("r", TypeDescriptor.of[IntRecord])),
      CreateInstance(createTypeDescriptor[IntRecord], List(SelectField(SelectTerm("r"), "i"))))

    val code = ScalarFunctionGenerator.default.getScalaAnonymousFunction(tree)
    val expectedCode = "(r: com.amazon.milan.compiler.scala.TestScalarFunctionGenerator.IntRecord) => new com.amazon.milan.compiler.scala.TestScalarFunctionGenerator.IntRecord(r.i)"
    assertEquals(expectedCode, code)
  }

  @Test
  def test_ScalarFunctionGenerator_WithUnpackOfTuple_ReturnsCodeUsingMatchStatement(): Unit = {
    val inputType = TypeDescriptor.createTuple[(Int, String)](List(types.Int, types.String))
    val tree = FunctionDef(List(ValueDef("t", inputType)), Unpack(SelectTerm("t"), List("i", "s"), SelectTerm("i")))
    TypeChecker.typeCheck(tree)

    val code = ScalarFunctionGenerator.default.getScalaAnonymousFunction(tree)
    val expectedCode = "(t: Tuple2[Int, String]) => t match { case (i, s) => i }"
    assertEquals(expectedCode, code)
  }
} 
Example 200
Source File: TestStreamAppAggregate.scala    From milan   with Apache License 2.0 5 votes vote down vote up
package com.amazon.milan.compiler.scala

import com.amazon.milan.compiler.scala.testing.KeyValueRecord
import com.amazon.milan.lang
import org.junit.Assert._
import org.junit.Test


@Test
class TestStreamAppAggregate {
  @Test
  def test_StreamAppAggregate_GroupBy_WithMaxBy_OutputsMaxPerGroup(): Unit = {
    def maxByValue(group: lang.Stream[KeyValueRecord]): lang.Stream[KeyValueRecord] =
      group.maxBy(r => r.value)

    val input = lang.Stream.of[KeyValueRecord].withId("input")
    val group = input.groupBy(r => r.key).withId("group")
    val output = group.flatMap((key, group) => maxByValue(group)).withId("output")

    val compiledFunction = StreamAppTester.compile(output)

    val inputRecords = KeyValueRecord.generate(100, 5, 100)
    val outputRecords = compiledFunction(inputRecords.toStream).toList

    val lastOutputRecords = outputRecords.groupBy(_.key).map { case (_, g) => g.last }.toList.sortBy(_.key)
    val expectedLastOutputRecords = inputRecords.groupBy(_.key).map { case (_, g) => g.maxBy(_.value) }.toList.sortBy(_.key)
    assertEquals(expectedLastOutputRecords, lastOutputRecords)
  }

  @Test
  def test_StreamAppAggregate_GroupBy_WithSumBy_OutputsSumPerGroup(): Unit = {
    def sumByValue(group: lang.Stream[KeyValueRecord]): lang.Stream[KeyValueRecord] =
      group.sumBy(r => r.value, (r, sum) => KeyValueRecord(r.key, sum))

    val input = lang.Stream.of[KeyValueRecord].withId("input")
    val group = input.groupBy(r => r.key).withId("group")
    val output = group.flatMap((key, group) => sumByValue(group)).withId("output")

    val compiledFunction = StreamAppTester.compile(output)

    val inputRecords = KeyValueRecord.generate(100, 5, 100)
    val outputRecords = compiledFunction(inputRecords.toStream).toList

    assertEquals(inputRecords.length, outputRecords.length)

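    // Only the final (cumulative) record per key is compared against the expected per-key sums.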
    val lastOutputRecords = outputRecords.groupBy(_.key).map { case (_, g) => g.last }.toList.sortBy(_.key)
    val expectedLastOutputRecords = inputRecords.groupBy(_.key).map { case (k, g) => KeyValueRecord(k, g.map(_.value).sum) }.toList.sortBy(_.key)
    assertEquals(expectedLastOutputRecords, lastOutputRecords)
  }
}