spray.json.JsString Scala Examples
The following examples show how to use spray.json.JsString.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
Example 1
Source File: BasicTestPerformance4Ftp.scala From ohara with Apache License 2.0 | 6 votes |
package oharastream.ohara.it.performance

import java.io.{BufferedWriter, OutputStreamWriter}
import java.util.concurrent.atomic.LongAdder

import oharastream.ohara.client.filesystem.FileSystem
import oharastream.ohara.common.data.Row
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import org.junit.AssumptionViolatedException
import spray.json.{JsNumber, JsString, JsValue}

import scala.concurrent.duration.Duration
import scala.jdk.CollectionConverters._

/**
  * Shared harness for FTP-based performance tests.
  *
  * Reads the FTP connection settings from the test configuration (via `value(...)`)
  * and offers helpers to push generated CSV data onto the FTP server.
  */
abstract class BasicTestPerformance4Ftp extends BasicTestPerformance {
  // Every connection property is mandatory; a missing key aborts the run as an
  // "assumption violated" (test skipped) instead of a failure.
  private[this] val ftpHostname = value(PerformanceTestingUtils.FTP_HOSTNAME_KEY)
    .getOrElse(throw new AssumptionViolatedException(s"${PerformanceTestingUtils.FTP_HOSTNAME_KEY} is required"))

  private[this] val ftpPort = value(PerformanceTestingUtils.FTP_PORT_KEY)
    .getOrElse(throw new AssumptionViolatedException(s"${PerformanceTestingUtils.FTP_PORT_KEY} is required"))
    .toInt

  private[this] val ftpUser = value(PerformanceTestingUtils.FTP_USER_KEY)
    .getOrElse(throw new AssumptionViolatedException(s"${PerformanceTestingUtils.FTP_USER_KEY} is required"))

  private[this] val ftpPassword = value(PerformanceTestingUtils.FTP_PASSWORD_KEY)
    .getOrElse(throw new AssumptionViolatedException(s"${PerformanceTestingUtils.FTP_PASSWORD_KEY} is required"))

  /** FTP endpoint settings ready to be merged into a connector request. */
  protected val ftpSettings: Map[String, JsValue] = Map(
    // convert the hostname to IP address
    oharastream.ohara.connector.ftp.FTP_HOSTNAME_KEY  -> JsString(ftpHostname),
    oharastream.ohara.connector.ftp.FTP_PORT_KEY      -> JsNumber(ftpPort),
    oharastream.ohara.connector.ftp.FTP_USER_NAME_KEY -> JsString(ftpUser),
    oharastream.ohara.connector.ftp.FTP_PASSWORD_KEY  -> JsString(ftpPassword)
  )

  private[this] val csvInputFolderKey       = PerformanceTestingUtils.CSV_INPUT_KEY
  private[this] val csvOutputFolder: String = value(csvInputFolderKey).getOrElse("/input")

  private[this] val cleanupTestDataKey         = PerformanceTestingUtils.DATA_CLEANUP_KEY
  // Cleanup defaults to true when the key is absent (forall on empty Option).
  protected val cleanupTestData: Boolean       = value(cleanupTestDataKey).forall(_.toBoolean)

  /**
    * Generates rows and writes them as CSV files into the input folder on the FTP server.
    *
    * @param timeout how long data generation may run
    * @return (input folder, number of rows written, accumulated content size)
    */
  protected def setupInputData(timeout: Duration): (String, Long, Long) = {
    val client = ftpClient()
    try {
      if (!PerformanceTestingUtils.exists(client, csvOutputFolder))
        PerformanceTestingUtils.createFolder(client, csvOutputFolder)
      val result = generateData(
        numberOfRowsToFlush,
        timeout,
        (rows: Seq[Row]) => {
          val file   = s"$csvOutputFolder/${CommonUtils.randomString()}"
          val writer = new BufferedWriter(new OutputStreamWriter(client.create(file)))
          val rowCount    = new LongAdder()
          val sizeInBytes = new LongAdder()
          try {
            // Header line: one column per distinct cell name of the first row.
            val cellNames: Set[String] = rows.head.cells().asScala.map(_.name).toSet
            writer
              .append(cellNames.mkString(","))
              .append("\n")
            rows.foreach { row =>
              val content = row.cells().asScala.map(_.value).mkString(",")
              rowCount.increment()
              // NOTE(review): this accumulates the char count, not the encoded
              // byte count — adequate as a rough size metric; confirm if exact
              // bytes are required.
              sizeInBytes.add(content.length)
              writer.append(content).append("\n")
            }
            (rowCount.longValue(), sizeInBytes.longValue())
          } finally Releasable.close(writer)
        }
      )
      (csvOutputFolder, result._1, result._2)
    } finally Releasable.close(client)
  }

  /** Builds a fresh FTP file-system client; the caller must close it. */
  protected[this] def ftpClient() =
    FileSystem.ftpBuilder
      .hostname(ftpHostname)
      .port(ftpPort)
      .user(ftpUser)
      .password(ftpPassword)
      .build
}
Example 2
Source File: CosmosDBUtil.scala From openwhisk with Apache License 2.0 | 6 votes |
// NOTE(review): this example appears truncated by the page scraper. The body of
// `CosmosDBConstants` (its constant fields, e.g. the `cid` referenced below) and
// the declaration of the `CosmosDBUtil` trait are missing, so `_id`, `_rev`,
// `cid` and the trailing `object CosmosDBUtil extends CosmosDBUtil` cannot
// resolve as shown. `escapeId`/`unescapeId` translate between '/' and '|' because
// CosmosDB ids may not contain '/'; `toWhiskJsonDoc` re-attaches `_id`/`_rev`
// after stripping db-internal fields (names starting with '_'). Recover the
// complete file from the openwhisk repository before reusing this code.
package org.apache.openwhisk.core.database.cosmosdb import com.microsoft.azure.cosmosdb.internal.Constants.Properties.{AGGREGATE, E_TAG, ID, SELF_LINK} import org.apache.openwhisk.core.database.cosmosdb.CosmosDBConstants._ import org.apache.openwhisk.core.database.StoreUtils.transform import spray.json.{JsObject, JsString} private[cosmosdb] object CosmosDBConstants { def escapeId(id: String): String = { require(!id.contains("|"), s"Id [$id] should not contain '|'") id.replace("/", "|") } def unescapeId(id: String): String = { require(!id.contains("/"), s"Escaped Id [$id] should not contain '/'") id.replace("|", "/") } def toWhiskJsonDoc(js: JsObject, id: String, etag: Option[JsString]): JsObject = { val fieldsToAdd = Seq((_id, Some(JsString(unescapeId(id)))), (_rev, etag)) transform(stripInternalFields(js), fieldsToAdd, Seq.empty) } private def stripInternalFields(js: JsObject) = { //Strip out all field name starting with '_' which are considered as db specific internal fields JsObject(js.fields.filter { case (k, _) => !k.startsWith("_") && k != cid }) } } private[cosmosdb] object CosmosDBUtil extends CosmosDBUtil
Example 3
Source File: TestPerformance4FtpSource.scala From ohara with Apache License 2.0 | 5 votes |
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.{ConnectorApi, TopicApi}
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.connector.ftp.FtpSource
import oharastream.ohara.it.category.PerformanceGroup
import oharastream.ohara.kafka.connector.csv.CsvConnectorDefinitions
import org.junit.Test
import org.junit.experimental.categories.Category
import spray.json.{JsNumber, JsString}

/** Performance test running an FTP source connector against generated CSV input. */
@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4FtpSource extends BasicTestPerformance4Ftp {
  @Test
  def test(): Unit = {
    val ftp = ftpClient()
    try {
      createTopic()
      val completedPath = "/completed"
      val errorPath     = "/error"
      // Seed the input folder once, then keep feeding it on a background thread.
      val (path, _, _) = setupInputData(timeoutOfInputData)
      try {
        loopInputDataThread(setupInputData)
        setupConnector(
          connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
          className = classOf[FtpSource].getName,
          settings = ftpSettings +
            (CsvConnectorDefinitions.INPUT_FOLDER_KEY -> JsString(path)) +
            (CsvConnectorDefinitions.COMPLETED_FOLDER_KEY -> JsString(
              PerformanceTestingUtils.createFolder(ftp, completedPath)
            )) +
            (CsvConnectorDefinitions.ERROR_FOLDER_KEY -> JsString(
              PerformanceTestingUtils.createFolder(ftp, errorPath)
            )) +
            (CsvConnectorDefinitions.SIZE_OF_FILE_CACHE_KEY -> JsNumber(fileNameCacheSize))
        )
        sleepUntilEnd()
      } finally if (cleanupTestData) {
        PerformanceTestingUtils.deleteFolder(ftp, path)
        PerformanceTestingUtils.deleteFolder(ftp, completedPath)
        PerformanceTestingUtils.deleteFolder(ftp, errorPath)
      }
    } finally Releasable.close(ftp)
  }

  // Nothing extra to tear down after stopping the connectors.
  override protected def afterStoppingConnectors(
    connectorInfos: Seq[ConnectorApi.ConnectorInfo],
    topicInfos: Seq[TopicApi.TopicInfo]
  ): Unit = {}
}
Example 4
Source File: TestPerformance4SambaSource.scala From ohara with Apache License 2.0 | 5 votes |
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.{ConnectorApi, TopicApi}
import oharastream.ohara.common.setting.ConnectorKey
// FIX: Releasable added so the samba client can be closed (see below).
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.connector.smb.SmbSource
import oharastream.ohara.it.category.PerformanceGroup
import oharastream.ohara.kafka.connector.csv.CsvConnectorDefinitions
import org.junit.Test
import org.junit.experimental.categories.Category
import spray.json.{JsNumber, JsString}

/** Performance test running an SMB (Samba) source connector against generated CSV input. */
@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4SambaSource extends BasicTestPerformance4Samba {
  @Test
  def test(): Unit = {
    val samba = sambaClient()
    // FIX: the original never closed the samba client, leaking the connection;
    // the sibling TestPerformance4FtpSource closes its client in a finally block.
    try {
      createTopic()
      val completedPath = "completed"
      val errorPath     = "error"
      val (path, _, _)  = setupInputData(timeoutOfInputData)
      try {
        loopInputDataThread(setupInputData)
        setupConnector(
          connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
          className = classOf[SmbSource].getName,
          settings = sambaSettings +
            (CsvConnectorDefinitions.INPUT_FOLDER_KEY -> JsString(path)) +
            (CsvConnectorDefinitions.COMPLETED_FOLDER_KEY -> JsString(
              PerformanceTestingUtils.createFolder(samba, completedPath)
            )) +
            (CsvConnectorDefinitions.ERROR_FOLDER_KEY -> JsString(
              PerformanceTestingUtils.createFolder(samba, errorPath)
            )) +
            (CsvConnectorDefinitions.SIZE_OF_FILE_CACHE_KEY -> JsNumber(fileNameCacheSize))
        )
        sleepUntilEnd()
      } finally if (needDeleteData) {
        PerformanceTestingUtils.deleteFolder(samba, path)
        PerformanceTestingUtils.deleteFolder(samba, completedPath)
        PerformanceTestingUtils.deleteFolder(samba, errorPath)
      }
    } finally Releasable.close(samba)
  }

  // Nothing extra to tear down after stopping the connectors.
  override protected def afterStoppingConnectors(
    connectorInfos: Seq[ConnectorApi.ConnectorInfo],
    topicInfos: Seq[TopicApi.TopicInfo]
  ): Unit = {}
}
Example 5
Source File: BasicTestPerformance4Samba.scala From ohara with Apache License 2.0 | 5 votes |
package oharastream.ohara.it.performance

import java.io.{BufferedWriter, OutputStreamWriter}
import java.util.concurrent.atomic.LongAdder

import oharastream.ohara.client.filesystem.FileSystem
import oharastream.ohara.common.data.Row
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import org.junit.AssumptionViolatedException
import spray.json.{JsNumber, JsString, JsValue}

import scala.concurrent.duration.Duration
import scala.jdk.CollectionConverters._

/**
  * Shared harness for Samba (SMB) based performance tests.
  *
  * NOTE(review): unlike BasicTestPerformance4Ftp (which reads via `value(...)`),
  * this class reads connection settings straight from environment variables —
  * confirm whether that asymmetry is intentional before unifying.
  */
abstract class BasicTestPerformance4Samba extends BasicTestPerformance {
  // Each env var is mandatory; getOrElse's by-name default throws only when absent,
  // aborting the run as "assumption violated" (skipped) rather than failed.
  private[this] val sambaHostname: String = sys.env.getOrElse(
    PerformanceTestingUtils.SAMBA_HOSTNAME_KEY,
    throw new AssumptionViolatedException(s"${PerformanceTestingUtils.SAMBA_HOSTNAME_KEY} does not exists!!!")
  )

  private[this] val sambaUsername: String = sys.env.getOrElse(
    PerformanceTestingUtils.SAMBA_USER_KEY,
    throw new AssumptionViolatedException(s"${PerformanceTestingUtils.SAMBA_USER_KEY} does not exists!!!")
  )

  private[this] val sambaPassword: String = sys.env.getOrElse(
    PerformanceTestingUtils.SAMBA_PASSWORD_KEY,
    throw new AssumptionViolatedException(s"${PerformanceTestingUtils.SAMBA_PASSWORD_KEY} does not exists!!!")
  )

  private[this] val sambaPort: Int = sys.env
    .getOrElse(
      PerformanceTestingUtils.SAMBA_PORT_KEY,
      throw new AssumptionViolatedException(s"${PerformanceTestingUtils.SAMBA_PORT_KEY} does not exists!!!")
    )
    .toInt

  private[this] val sambaShare: String = sys.env.getOrElse(
    PerformanceTestingUtils.SAMBA_SHARE_KEY,
    throw new AssumptionViolatedException(s"${PerformanceTestingUtils.SAMBA_SHARE_KEY} does not exists!!!")
  )

  private[this] val csvInputFolderKey       = PerformanceTestingUtils.CSV_INPUT_KEY
  private[this] val csvOutputFolder: String = value(csvInputFolderKey).getOrElse("input")

  private[this] val NEED_DELETE_DATA_KEY: String = PerformanceTestingUtils.DATA_CLEANUP_KEY
  // Cleanup defaults to true unless explicitly disabled via the environment.
  protected[this] val needDeleteData: Boolean = sys.env.getOrElse(NEED_DELETE_DATA_KEY, "true").toBoolean

  /** SMB endpoint settings ready to be merged into a connector request. */
  protected val sambaSettings: Map[String, JsValue] = Map(
    oharastream.ohara.connector.smb.SMB_HOSTNAME_KEY   -> JsString(sambaHostname),
    oharastream.ohara.connector.smb.SMB_PORT_KEY       -> JsNumber(sambaPort),
    oharastream.ohara.connector.smb.SMB_USER_KEY       -> JsString(sambaUsername),
    oharastream.ohara.connector.smb.SMB_PASSWORD_KEY   -> JsString(sambaPassword),
    oharastream.ohara.connector.smb.SMB_SHARE_NAME_KEY -> JsString(sambaShare)
  )

  /**
    * Generates rows and writes them as CSV files into the input folder on the SMB share.
    *
    * @param timeout how long data generation may run
    * @return (input folder, number of rows written, accumulated content size)
    */
  protected def setupInputData(timeout: Duration): (String, Long, Long) = {
    val client = sambaClient()
    try {
      if (!client.exists(csvOutputFolder))
        PerformanceTestingUtils.createFolder(client, csvOutputFolder)
      val result = generateData(
        numberOfRowsToFlush,
        timeout,
        (rows: Seq[Row]) => {
          val file   = s"$csvOutputFolder/${CommonUtils.randomString()}"
          val writer = new BufferedWriter(new OutputStreamWriter(client.create(file)))
          val rowCount    = new LongAdder()
          val sizeInBytes = new LongAdder()
          try {
            // Header line: one column per distinct cell name of the first row.
            val cellNames: Set[String] = rows.head.cells().asScala.map(_.name).toSet
            writer
              .append(cellNames.mkString(","))
              .append("\n")
            rows.foreach { row =>
              val content = row.cells().asScala.map(_.value).mkString(",")
              rowCount.increment()
              // NOTE(review): char count, not encoded byte count — rough metric.
              sizeInBytes.add(content.length)
              writer
                .append(content)
                .append("\n")
            }
            (rowCount.longValue(), sizeInBytes.longValue())
          } finally Releasable.close(writer)
        }
      )
      (csvOutputFolder, result._1, result._2)
    } finally Releasable.close(client)
  }

  /** Builds a fresh SMB file-system client; the caller must close it. */
  protected[this] def sambaClient(): FileSystem =
    FileSystem.smbBuilder
      .hostname(sambaHostname)
      .port(sambaPort)
      .user(sambaUsername)
      .password(sambaPassword)
      .shareName(sambaShare)
      .build()
}
Example 6
Source File: TestPerformance4HdfsSink.scala From ohara with Apache License 2.0 | 5 votes |
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.ConnectorApi.ConnectorInfo
import oharastream.ohara.client.configurator.TopicApi.TopicInfo
import oharastream.ohara.client.filesystem.FileSystem
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.connector.hdfs.sink.HDFSSink
import oharastream.ohara.it.category.PerformanceGroup
import oharastream.ohara.kafka.connector.csv.CsvConnectorDefinitions
import org.junit.experimental.categories.Category
import spray.json.{JsNumber, JsString}
import org.junit.Test

/** Performance test running an HDFS sink connector fed by produced topic data. */
@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4HdfsSink extends BasicTestPerformance {
  private[this] val NEED_DELETE_DATA_KEY: String = PerformanceTestingUtils.DATA_CLEANUP_KEY
  // Cleanup defaults to true unless explicitly disabled via the environment.
  private[this] val needDeleteData: Boolean = sys.env.getOrElse(NEED_DELETE_DATA_KEY, "true").toBoolean

  @Test
  def test(): Unit = {
    val hdfs = hdfsClient()
    try {
      createTopic()
      // Seed the topic once, then keep producing on a background thread.
      produce(timeoutOfInputData)
      loopInputDataThread(produce)
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[HDFSSink].getName(),
        settings = Map(
          CsvConnectorDefinitions.FLUSH_SIZE_KEY -> JsNumber(numberOfCsvFileToFlush),
          oharastream.ohara.connector.hdfs.sink.HDFS_URL_KEY -> JsString(PerformanceTestingUtils.hdfsURL),
          oharastream.ohara.connector.hdfs.sink.OUTPUT_FOLDER_KEY -> JsString(
            PerformanceTestingUtils.createFolder(hdfs, PerformanceTestingUtils.dataDir)
          )
        )
      )
      sleepUntilEnd()
    } finally Releasable.close(hdfs)
  }

  override protected def afterStoppingConnectors(
    connectorInfos: Seq[ConnectorInfo],
    topicInfos: Seq[TopicInfo]
  ): Unit = {
    if (needDeleteData) {
      //Delete file from the HDFS
      val hdfs = hdfsClient()
      try topicInfos.foreach { topicInfo =>
        val path = s"${PerformanceTestingUtils.dataDir}/${topicInfo.topicNameOnKafka}"
        PerformanceTestingUtils.deleteFolder(hdfs, path)
      } finally Releasable.close(hdfs)
    }
  }

  /** Builds a fresh HDFS file-system client; the caller must close it. */
  private[this] def hdfsClient(): FileSystem =
    FileSystem.hdfsBuilder.url(PerformanceTestingUtils.hdfsURL).build
}
Example 7
Source File: TestPerformance4JDBCSourceToHDFSSink.scala From ohara with Apache License 2.0 | 5 votes |
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.ConnectorApi.ConnectorInfo
import oharastream.ohara.client.configurator.TopicApi.TopicInfo
import oharastream.ohara.client.filesystem.FileSystem
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.connector.hdfs.sink.HDFSSink
import oharastream.ohara.connector.jdbc.source.JDBCSourceConnector
import oharastream.ohara.it.category.PerformanceGroup
import org.junit.experimental.categories.Category
import org.junit.Test
import spray.json.{JsNumber, JsString}

/** End-to-end performance test: JDBC source connector feeding an HDFS sink connector. */
@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4JDBCSourceToHDFSSink extends BasicTestPerformance4Jdbc {
  override protected val tableName: String = s"TABLE${CommonUtils.randomString().toUpperCase()}"

  @Test
  def test(): Unit = {
    val hdfs = hdfsClient()
    try {
      createTable()
      // Seed the table once, then keep inserting on a background thread.
      setupInputData(timeoutOfInputData)
      loopInputDataThread(setupInputData)
      createTopic()
      //Running JDBC Source Connector
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[JDBCSourceConnector].getName(),
        settings = Map(
          oharastream.ohara.connector.jdbc.source.DB_URL                -> JsString(url),
          oharastream.ohara.connector.jdbc.source.DB_USERNAME           -> JsString(user),
          oharastream.ohara.connector.jdbc.source.DB_PASSWORD           -> JsString(password),
          oharastream.ohara.connector.jdbc.source.DB_TABLENAME          -> JsString(tableName),
          oharastream.ohara.connector.jdbc.source.TIMESTAMP_COLUMN_NAME -> JsString(timestampColumnName),
          oharastream.ohara.connector.jdbc.source.DB_SCHEMA_PATTERN     -> JsString(user),
          oharastream.ohara.connector.jdbc.source.JDBC_FETCHDATA_SIZE   -> JsNumber(10000),
          oharastream.ohara.connector.jdbc.source.JDBC_FLUSHDATA_SIZE   -> JsNumber(10000)
        )
      )
      //Running HDFS Sink Connector
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[HDFSSink].getName(),
        settings = Map(
          oharastream.ohara.connector.hdfs.sink.HDFS_URL_KEY   -> JsString(PerformanceTestingUtils.hdfsURL),
          oharastream.ohara.connector.hdfs.sink.FLUSH_SIZE_KEY -> JsNumber(numberOfCsvFileToFlush),
          oharastream.ohara.connector.hdfs.sink.OUTPUT_FOLDER_KEY -> JsString(
            PerformanceTestingUtils.createFolder(hdfs, PerformanceTestingUtils.dataDir)
          )
        )
      )
      sleepUntilEnd()
    } finally Releasable.close(hdfs)
  }

  override protected def afterStoppingConnectors(
    connectorInfos: Seq[ConnectorInfo],
    topicInfos: Seq[TopicInfo]
  ): Unit = {
    if (needDeleteData) {
      //Drop table for the database
      client.dropTable(tableName)
      //Delete file from the HDFS
      val hdfs = hdfsClient()
      try {
        topicInfos.foreach { topicInfo =>
          val path = s"${PerformanceTestingUtils.dataDir}/${topicInfo.topicNameOnKafka}"
          PerformanceTestingUtils.deleteFolder(hdfs, path)
        }
      } finally Releasable.close(hdfs)
    }
  }

  /** Builds a fresh HDFS file-system client; the caller must close it. */
  private[this] def hdfsClient(): FileSystem =
    FileSystem.hdfsBuilder.url(PerformanceTestingUtils.hdfsURL).build
}
Example 8
Source File: TestPerformance4FtpSink.scala From ohara with Apache License 2.0 | 5 votes |
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.ConnectorApi.ConnectorInfo
import oharastream.ohara.client.configurator.TopicApi.TopicInfo
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.connector.ftp.FtpSink
import oharastream.ohara.it.category.PerformanceGroup
import oharastream.ohara.kafka.connector.csv.CsvConnectorDefinitions
import spray.json.{JsNumber, JsString}
import org.junit.Test
import org.junit.experimental.categories.Category

/** Performance test running an FTP sink connector fed by produced topic data. */
@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4FtpSink extends BasicTestPerformance4Ftp {
  // Sink output lands under this folder, one subfolder per topic.
  private[this] val dataDir: String = "/tmp"

  @Test
  def test(): Unit = {
    val ftp = ftpClient()
    try {
      createTopic()
      // Seed the topic once, then keep producing on a background thread.
      produce(timeoutOfInputData)
      loopInputDataThread(produce)
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[FtpSink].getName(),
        settings = ftpSettings ++
          Map(
            CsvConnectorDefinitions.OUTPUT_FOLDER_KEY -> JsString(PerformanceTestingUtils.createFolder(ftp, dataDir)),
            CsvConnectorDefinitions.FLUSH_SIZE_KEY    -> JsNumber(numberOfCsvFileToFlush)
          )
      )
      sleepUntilEnd()
    } finally Releasable.close(ftp)
  }

  // Remove each topic's output folder from the FTP server when cleanup is enabled.
  override protected def afterStoppingConnectors(connectorInfos: Seq[ConnectorInfo], topicInfos: Seq[TopicInfo]): Unit =
    if (cleanupTestData)
      topicInfos.foreach { topicInfo =>
        val path = s"${dataDir}/${topicInfo.topicNameOnKafka}"
        val ftp  = ftpClient()
        try if (PerformanceTestingUtils.exists(ftp, path)) PerformanceTestingUtils.deleteFolder(ftp, path)
        finally Releasable.close(ftp)
      }
}
Example 9
Source File: TestPerformance4FtpSourceToHDFSSink.scala From ohara with Apache License 2.0 | 5 votes |
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.ConnectorApi.ConnectorInfo
import oharastream.ohara.client.configurator.TopicApi.TopicInfo
import oharastream.ohara.client.filesystem.FileSystem
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.connector.ftp.FtpSource
import oharastream.ohara.connector.hdfs.sink.HDFSSink
import oharastream.ohara.it.category.PerformanceGroup
import oharastream.ohara.kafka.connector.csv.CsvConnectorDefinitions
import org.junit.Test
import org.junit.experimental.categories.Category
import spray.json.{JsNumber, JsString}

/** End-to-end performance test: FTP source connector feeding an HDFS sink connector. */
@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4FtpSourceToHDFSSink extends BasicTestPerformance4Ftp {
  private[this] val ftpCompletedPath = "/completed"
  private[this] val ftpErrorPath     = "/error"
  // NOTE(review): this runs at construction time (before @Test), unlike sibling
  // tests that seed input inside test() — confirm the eager seeding is intended.
  private[this] val (path, _, _) = setupInputData(timeoutOfInputData)

  @Test
  def test(): Unit = {
    val ftp  = ftpClient()
    val hdfs = hdfsClient()
    try {
      createTopic()
      loopInputDataThread(setupInputData)
      //Running FTP Source Connector
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[FtpSource].getName,
        settings = ftpSettings +
          (CsvConnectorDefinitions.INPUT_FOLDER_KEY -> JsString(path)) +
          (CsvConnectorDefinitions.COMPLETED_FOLDER_KEY -> JsString(
            PerformanceTestingUtils.createFolder(ftp, ftpCompletedPath)
          )) +
          (CsvConnectorDefinitions.ERROR_FOLDER_KEY -> JsString(
            PerformanceTestingUtils.createFolder(ftp, ftpErrorPath)
          ))
      )
      //Running HDFS Sink Connector
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[HDFSSink].getName(),
        settings = Map(
          oharastream.ohara.connector.hdfs.sink.HDFS_URL_KEY   -> JsString(PerformanceTestingUtils.hdfsURL),
          oharastream.ohara.connector.hdfs.sink.FLUSH_SIZE_KEY -> JsNumber(numberOfCsvFileToFlush),
          oharastream.ohara.connector.hdfs.sink.OUTPUT_FOLDER_KEY -> JsString(
            PerformanceTestingUtils.createFolder(hdfs, PerformanceTestingUtils.dataDir)
          )
        )
      )
      sleepUntilEnd()
    } finally {
      Releasable.close(hdfs)
      Releasable.close(ftp)
    }
  }

  override protected def afterStoppingConnectors(
    connectorInfos: Seq[ConnectorInfo],
    topicInfos: Seq[TopicInfo]
  ): Unit = {
    if (cleanupTestData) {
      //Delete file for the FTP
      val ftp  = ftpClient()
      val hdfs = hdfsClient()
      try {
        PerformanceTestingUtils.deleteFolder(ftp, path)
        PerformanceTestingUtils.deleteFolder(ftp, ftpCompletedPath)
        PerformanceTestingUtils.deleteFolder(ftp, ftpErrorPath)
        //Delete file from the HDFS
        topicInfos.foreach { topicInfo =>
          val path = s"${PerformanceTestingUtils.dataDir}/${topicInfo.topicNameOnKafka}"
          PerformanceTestingUtils.deleteFolder(hdfs, path)
        }
      } finally {
        Releasable.close(hdfs)
        Releasable.close(ftp)
      }
    }
  }

  /** Builds a fresh HDFS file-system client; the caller must close it. */
  private[this] def hdfsClient(): FileSystem =
    FileSystem.hdfsBuilder.url(PerformanceTestingUtils.hdfsURL).build
}
Example 10
Source File: TestPerformance4Oracle.scala From ohara with Apache License 2.0 | 5 votes |
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.{ConnectorApi, TopicApi}
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.CommonUtils
import oharastream.ohara.connector.jdbc.source.JDBCSourceConnector
import oharastream.ohara.it.category.PerformanceGroup
import org.junit.Test
import org.junit.experimental.categories.Category
import spray.json.{JsNumber, JsString}

/** Performance test running a JDBC source connector against an Oracle table. */
@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4Oracle extends BasicTestPerformance4Jdbc {
  override protected val tableName: String = s"TABLE${CommonUtils.randomString().toUpperCase()}"

  @Test
  def test(): Unit = {
    createTable()
    // Seed the table once, then keep inserting on a background thread.
    setupInputData(timeoutOfInputData)
    loopInputDataThread(setupInputData)
    createTopic()
    try {
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[JDBCSourceConnector].getName(),
        settings = Map(
          oharastream.ohara.connector.jdbc.source.DB_URL                -> JsString(url),
          oharastream.ohara.connector.jdbc.source.DB_USERNAME           -> JsString(user),
          oharastream.ohara.connector.jdbc.source.DB_PASSWORD           -> JsString(password),
          oharastream.ohara.connector.jdbc.source.DB_TABLENAME          -> JsString(tableName),
          oharastream.ohara.connector.jdbc.source.TIMESTAMP_COLUMN_NAME -> JsString(timestampColumnName),
          oharastream.ohara.connector.jdbc.source.DB_SCHEMA_PATTERN     -> JsString(user),
          oharastream.ohara.connector.jdbc.source.JDBC_FETCHDATA_SIZE   -> JsNumber(10000),
          oharastream.ohara.connector.jdbc.source.JDBC_FLUSHDATA_SIZE   -> JsNumber(10000)
        )
      )
      sleepUntilEnd()
    } finally if (needDeleteData) client.dropTable(tableName)
  }

  // Nothing extra to tear down after stopping the connectors.
  override protected def afterStoppingConnectors(
    connectorInfos: Seq[ConnectorApi.ConnectorInfo],
    topicInfos: Seq[TopicApi.TopicInfo]
  ): Unit = {}
}
Example 11
Source File: TestPerformance4SambaSink.scala From ohara with Apache License 2.0 | 5 votes |
package oharastream.ohara.it.performance

import oharastream.ohara.client.configurator.ConnectorApi.ConnectorInfo
import oharastream.ohara.client.configurator.TopicApi.TopicInfo
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.connector.smb.SmbSink
import oharastream.ohara.it.category.PerformanceGroup
import oharastream.ohara.kafka.connector.csv.CsvConnectorDefinitions
import org.junit.Test
import org.junit.experimental.categories.Category
import spray.json.{JsNumber, JsString}

/** Performance test running an SMB (Samba) sink connector fed by produced topic data. */
@Category(Array(classOf[PerformanceGroup]))
class TestPerformance4SambaSink extends BasicTestPerformance4Samba {
  // Sink output lands under this folder, one subfolder per topic.
  private[this] val outputDir: String = "output"

  @Test
  def test(): Unit = {
    val samba = sambaClient()
    try {
      createTopic()
      // Seed the topic once, then keep producing on a background thread.
      produce(timeoutOfInputData)
      loopInputDataThread(produce)
      setupConnector(
        connectorKey = ConnectorKey.of(groupName, CommonUtils.randomString(5)),
        className = classOf[SmbSink].getName,
        settings = sambaSettings ++
          Map(
            CsvConnectorDefinitions.OUTPUT_FOLDER_KEY -> JsString(
              PerformanceTestingUtils.createFolder(samba, outputDir)
            ),
            CsvConnectorDefinitions.FLUSH_SIZE_KEY -> JsNumber(numberOfCsvFileToFlush)
          )
      )
      sleepUntilEnd()
    } finally Releasable.close(samba)
  }

  // Remove each topic's output folder from the share when cleanup is enabled.
  override protected def afterStoppingConnectors(connectorInfos: Seq[ConnectorInfo], topicInfos: Seq[TopicInfo]): Unit =
    if (needDeleteData)
      topicInfos.foreach { topicInfo =>
        val path  = s"$outputDir/${topicInfo.topicNameOnKafka}"
        val samba = sambaClient()
        try {
          if (PerformanceTestingUtils.exists(samba, path)) PerformanceTestingUtils.deleteFolder(samba, path)
        } finally Releasable.close(samba)
      }
}
Example 12
Source File: ArgumentsBuilder.scala From ohara with Apache License 2.0 | 5 votes |
// NOTE(review): this example appears truncated by the page scraper. The
// declaration of the `ArgumentsBuilder` trait is missing — `def mainConfigFile`
// and `override def build` float directly after the imports, and `def file` is
// implemented in the companion's `apply()` without a visible abstract
// declaration. Also the `// NOTED:` comment is split mid-sentence across lines.
// The intent that IS visible: `FileAppender` accumulates config lines for one
// file; `apply()` builds args of the form `--file path=line0,line1 ... --config
// mainConfigFile`, requiring a non-empty main config file. Recover the complete
// file from the ohara repository before reusing this code.
package oharastream.ohara.agent import oharastream.ohara.agent.ArgumentsBuilder.FileAppender import oharastream.ohara.common.util.CommonUtils import spray.json.{JsNull, JsNumber, JsString, JsValue} import scala.collection.mutable def mainConfigFile(path: String): ArgumentsBuilder override def build: Seq[String] } object ArgumentsBuilder { trait FileAppender { private[this] val props = mutable.Buffer[String]() def append(prop: Int): FileAppender = append(prop.toString) def append(prop: String): FileAppender = append(Seq(prop)) def append(props: Seq[String]): FileAppender = { this.props ++= props this } def append(key: String, value: Boolean): FileAppender = append(s"$key=$value") def append(key: String, value: Short): FileAppender = append(s"$key=$value") def append(key: String, value: Int): FileAppender = append(s"$key=$value") def append(key: String, value: String): FileAppender = append(s"$key=$value") def append(key: String, value: JsValue): FileAppender = append( key, value match { case JsString(value) => value case JsNumber(value) => value.toString case JsNull => throw new IllegalArgumentException(s"JsNull is not legal") case _ => value.toString() } ) def done: ArgumentsBuilder = done(props.toSeq) protected def done(props: Seq[String]): ArgumentsBuilder } def apply(): ArgumentsBuilder = new ArgumentsBuilder { private[this] val files = mutable.Map[String, Seq[String]]() private[this] var mainConfigFile: String = _ override def build: Seq[String] = if (CommonUtils.isEmpty(mainConfigFile)) throw new IllegalArgumentException("you have to define the main configs") else // format: --file path=line0,line1 --file path1=line0,line1 // NOTED: the path and props must be in different line. 
otherwise, k8s will merge them into single line and our // script will fail to parse the command-line arguments files.flatMap { case (path, props) => Seq("--file", s"$path=${props.mkString(",")}") }.toSeq ++ Seq("--config", mainConfigFile) override def file(path: String): FileAppender = (props: Seq[String]) => { this.files += (path -> props) this } override def mainConfigFile(path: String): ArgumentsBuilder = { this.mainConfigFile = CommonUtils.requireNonEmpty(path) this } } }
Example 13
Source File: TestRouteUtils.scala From ohara with Apache License 2.0 | 5 votes |
package oharastream.ohara.configurator.route

import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.SettingDef
import oharastream.ohara.common.setting.SettingDef.Permission
import org.junit.Test
import spray.json.JsString
import org.scalatest.matchers.should.Matchers._

/** Unit tests for the route helper functions. */
class TestRouteUtils extends OharaTest {
  /** A CREATE_ONLY setting must be stripped out when filtering editable fields. */
  @Test
  def testUpdatable(): Unit = {
    val rawSettings   = Map("a" -> JsString("b"))
    val createOnlyDef = SettingDef
      .builder()
      .key("a")
      .permission(Permission.CREATE_ONLY)
      .build()
    keepEditableFields(rawSettings, Seq(createOnlyDef)) shouldBe Map.empty
  }
}
Example 14
Source File: TestObjectRoute.scala From ohara with Apache License 2.0 | 5 votes |
package oharastream.ohara.configurator.route

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.ObjectApi
import oharastream.ohara.client.configurator.ObjectApi.ObjectInfo
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.ObjectKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.configurator.Configurator
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._
import spray.json.JsString

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

/** CRUD tests for the object route, backed by a fake (in-memory) configurator. */
class TestObjectRoute extends OharaTest {
  private[this] val configurator = Configurator.builder.fake(1, 1).build()

  private[this] val objectApi = ObjectApi.access.hostname(configurator.hostname).port(configurator.port)

  // Blocks on the future with a generous timeout; test-only convenience.
  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(20, TimeUnit.SECONDS))

  /** Creates an object with two random settings and verifies the echoed values. */
  private[this] def create(): ObjectInfo = {
    val key = ObjectKey.of("g", "n")
    val settings = Map(
      CommonUtils.randomString() -> JsString(CommonUtils.randomString()),
      CommonUtils.randomString() -> JsString(CommonUtils.randomString())
    )
    val objectInfo = result(objectApi.request.key(key).settings(settings).create())
    objectInfo.key shouldBe key
    settings.foreach {
      case (k, v) => objectInfo.settings(k) shouldBe v
    }
    objectInfo
  }

  @Test
  def testCreate(): Unit = create()

  @Test
  def testGet(): Unit = {
    val objectInfo = create()
    objectInfo shouldBe result(objectApi.get(objectInfo.key))
  }

  @Test
  def testGetNothing(): Unit =
    an[IllegalArgumentException] should be thrownBy result(objectApi.get(ObjectKey.of(CommonUtils.randomString(), "n")))

  @Test
  def testList(): Unit = {
    val objectInfo = create()
    objectInfo shouldBe result(objectApi.list()).head
  }

  @Test
  def testDelete(): Unit = {
    val objectInfo = create()
    result(objectApi.delete(objectInfo.key))
    result(objectApi.list()) shouldBe Seq.empty
  }

  @Test
  def testUpdate(): Unit = {
    val objectInfo = create()
    val newSettings = Map(
      CommonUtils.randomString() -> JsString(CommonUtils.randomString()),
      CommonUtils.randomString() -> JsString(CommonUtils.randomString())
    )
    val updated = result(objectApi.request.key(objectInfo.key).settings(newSettings).update())
    newSettings.foreach {
      case (k, v) => updated.settings(k) shouldBe v
    }
    // All original settings survive an update except the refreshed timestamp.
    objectInfo.settings.foreach {
      case (k, v) =>
        if (k == "lastModified") updated.settings(k) should not be v
        else updated.settings(k) shouldBe v
    }
  }

  @After
  def tearDown(): Unit = Releasable.close(configurator)
}
Example 15
Source File: ClusterRequest.scala From ohara with Apache License 2.0 | 5 votes |
package oharastream.ohara.client.configurator

import oharastream.ohara.common.annotations.Optional
import oharastream.ohara.common.setting.ObjectKey
import oharastream.ohara.common.util.CommonUtils
import spray.json.DefaultJsonProtocol._
import spray.json.{JsArray, JsNumber, JsObject, JsString, JsValue}

import scala.jdk.CollectionConverters._
import scala.collection.mutable

// NOTE(review): this chunk begins inside the enclosing ClusterRequest trait — its
// declaration and the *_KEY / GROUP_DEFAULT constants referenced below live outside
// this view.

  /** Key assembled from current settings; group falls back to GROUP_DEFAULT when unset. */
  protected def key: ObjectKey = ObjectKey.of(
    settings.get(GROUP_KEY).map(_.convertTo[String]).getOrElse(GROUP_DEFAULT),
    settings(NAME_KEY).convertTo[String]
  )

  // accumulated request payload; mutated by the fluent setters below
  protected val settings: mutable.Map[String, JsValue] = mutable.Map()

  @Optional("default key is a random string. But it is required in updating")
  def key(key: ObjectKey): ClusterRequest.this.type = {
    setting(NAME_KEY, JsString(key.name()))
    setting(GROUP_KEY, JsString(key.group()))
  }

  @Optional("default name is a random string. But it is required in updating")
  def name(name: String): ClusterRequest.this.type =
    setting(NAME_KEY, JsString(CommonUtils.requireNonEmpty(name)))

  @Optional("default is GROUP_DEFAULT")
  def group(group: String): ClusterRequest.this.type =
    setting(GROUP_KEY, JsString(CommonUtils.requireNonEmpty(group)))

  /** Convenience overload for a single (non-empty) node name. */
  def nodeName(nodeName: String): ClusterRequest.this.type = nodeNames(Set(CommonUtils.requireNonEmpty(nodeName)))

  // rejects an empty set via requireNonEmpty before encoding as a JSON array
  def nodeNames(nodeNames: Set[String]): ClusterRequest.this.type =
    setting(NODE_NAMES_KEY, JsArray(CommonUtils.requireNonEmpty(nodeNames.asJava).asScala.map(JsString(_)).toVector))

  @Optional("default value is empty array")
  def routes(routes: Map[String, String]): ClusterRequest.this.type =
    setting(ROUTES_KEY, JsObject(routes.map {
      case (k, v) => k -> JsString(v)
    }))

  @Optional("default value is 1024")
  def initHeap(sizeInMB: Int): ClusterRequest.this.type =
    setting(INIT_HEAP_KEY, JsNumber(CommonUtils.requirePositiveInt(sizeInMB)))

  @Optional("default value is 1024")
  def maxHeap(sizeInMB: Int): ClusterRequest.this.type =
    setting(MAX_HEAP_KEY, JsNumber(CommonUtils.requirePositiveInt(sizeInMB)))

  @Optional("extra settings is empty by default")
  def setting(key: String, value: JsValue): ClusterRequest.this.type = settings(Map(key -> value))

  @Optional("extra settings is empty by default")
  def settings(settings: Map[String, JsValue]): ClusterRequest.this.type = {
    // We don't have to check the settings is empty here for the following reasons:
    // 1) we may want to use the benefit of default creation without specify settings
    // 2) actual checking will be done in the json parser phase of creation or update
    this.settings ++= settings
    this
  }
}
Example 16
Source File: ClusterUpdating.scala From ohara with Apache License 2.0 | 5 votes |
package oharastream.ohara.client.configurator import spray.json.DefaultJsonProtocol._ import spray.json.JsString trait ClusterUpdating extends BasicUpdating { def imageName: Option[String] = raw.get(IMAGE_NAME_KEY).map(_.convertTo[String]) def nodeNames: Option[Set[String]] = raw.get(NODE_NAMES_KEY).map(_.convertTo[Seq[String]].toSet) def routes: Option[Map[String, String]] = raw .get(ROUTES_KEY) .map(_.asJsObject.fields.filter(_._2.isInstanceOf[JsString]).map { case (k, v) => k -> v.convertTo[String] }) def jmxPort: Option[Int] = raw.get(JMX_PORT_KEY).map(_.convertTo[Int]) }
Example 17
Source File: TestFileInfoApi.scala From ohara with Apache License 2.0 | 5 votes |
package oharastream.ohara.client.configurator

import java.io.File

import oharastream.ohara.client.configurator.FileInfoApi.FileInfo
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.CommonUtils
import org.junit.Test
import org.scalatest.matchers.should.Matchers._
import spray.json.JsString

import scala.concurrent.ExecutionContext.Implicits.global

/** Argument-validation and serialization tests for the file-info API. */
class TestFileInfoApi extends OharaTest {
  // these tests never actually connect, so any hostname/port will do
  private[this] def access: FileInfoApi.Access = FileInfoApi.access.hostname(CommonUtils.hostname()).port(22)

  @Test
  def nullKeyInGet(): Unit = an[NullPointerException] should be thrownBy access.get(null)

  @Test
  def nullKeyInDelete(): Unit = an[NullPointerException] should be thrownBy access.delete(null)

  @Test
  def emptyName(): Unit = an[IllegalArgumentException] should be thrownBy access.request.name("")

  @Test
  def nullName(): Unit = an[NullPointerException] should be thrownBy access.request.name(null)

  @Test
  def emptyGroup(): Unit = an[IllegalArgumentException] should be thrownBy access.request.group("")

  @Test
  def nullGroup(): Unit = an[NullPointerException] should be thrownBy access.request.group(null)

  @Test
  def nullFile(): Unit = an[NullPointerException] should be thrownBy access.request.file(null)

  @Test
  def nonexistentFile(): Unit =
    an[IllegalArgumentException] should be thrownBy access.request.file(new File(CommonUtils.randomString(5)))

  @Test
  def nullTags(): Unit = an[NullPointerException] should be thrownBy access.request.tags(null)

  // empty tags are explicitly legal
  @Test
  def emptyTags(): Unit = access.request.tags(Map.empty)

  @Test
  def bytesMustBeEmptyAfterSerialization(): Unit = {
    val bytes = CommonUtils.randomString().getBytes()
    val fileInfo = new FileInfo(
      group = CommonUtils.randomString(),
      name = CommonUtils.randomString(),
      lastModified = CommonUtils.current(),
      bytes = bytes,
      url = None,
      classInfos = Seq.empty,
      tags = Map("a" -> JsString("b"))
    )
    // the JSON form omits the payload, so a write/read round-trip drops the bytes
    val copy = FileInfoApi.FILE_INFO_FORMAT.read(FileInfoApi.FILE_INFO_FORMAT.write(fileInfo))
    copy.group shouldBe fileInfo.group
    copy.name shouldBe fileInfo.name
    copy.lastModified shouldBe fileInfo.lastModified
    copy.bytes shouldBe Array.empty
    copy.url shouldBe fileInfo.url
    copy.tags shouldBe fileInfo.tags
  }

  @Test
  def nullUrlShouldBeRemoved(): Unit = {
    val fileInfo = new FileInfo(
      group = CommonUtils.randomString(),
      name = CommonUtils.randomString(),
      lastModified = CommonUtils.current(),
      bytes = Array.emptyByteArray,
      url = None,
      classInfos = Seq.empty,
      tags = Map("a" -> JsString("b"))
    )
    // a None url must not serialize to "url": null — the field is absent entirely
    FileInfoApi.FILE_INFO_FORMAT.write(fileInfo).asJsObject.fields should not contain "url"
  }
}
Example 18
Source File: TestObjectApi.scala From ohara with Apache License 2.0 | 5 votes |
package oharastream.ohara.client.configurator import oharastream.ohara.client.configurator.ObjectApi._ import oharastream.ohara.common.rule.OharaTest import oharastream.ohara.common.setting.ObjectKey import oharastream.ohara.common.util.CommonUtils import org.junit.Test import org.scalatest.matchers.should.Matchers._ import spray.json.JsString class TestObjectApi extends OharaTest { @Test def testSetLastModified(): Unit = ObjectInfo(Map.empty, 123).lastModified shouldBe 123 @Test def testEquals(): Unit = ObjectInfo(Map("a" -> JsString("b")), 123) shouldBe ObjectInfo(Map("a" -> JsString("b")), 123) @Test def testNameInCreation(): Unit = ObjectApi.access.request.name("ab").creation.name shouldBe "ab" @Test def testGroupInCreation(): Unit = ObjectApi.access.request.name("ab").group("ab").creation.name shouldBe "ab" @Test def testKeyInCreation(): Unit = { val creation = ObjectApi.access.request.key(ObjectKey.of("g", "n")).creation creation.group shouldBe "g" creation.name shouldBe "n" } @Test def testTagsInCreation(): Unit = ObjectApi.access.request.key(ObjectKey.of("g", "n")).creation.tags shouldBe Map.empty @Test def testDefaultGroup(): Unit = ObjectApi.access.request.name(CommonUtils.randomString(10)).creation.group shouldBe GROUP_DEFAULT }
Example 19
Source File: HttpServiceWithPostgresIntTest.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http

import com.daml.http.Statement.discard
import com.daml.testing.postgresql.PostgresAroundAll
import spray.json.{JsString, JsValue}

import scala.collection.compat._
import scala.concurrent.Future

/** Runs the HTTP-service integration suite against a real PostgreSQL instance. */
class HttpServiceWithPostgresIntTest
    extends AbstractHttpServiceIntegrationTest
    with PostgresAroundAll {

  override def jdbcConfig: Option[JdbcConfig] = Some(jdbcConfig_)

  override def staticContentConfig: Option[StaticContentConfig] = None

  // has to be lazy because postgresFixture is NOT initialized yet
  private lazy val jdbcConfig_ = JdbcConfig(
    driver = "org.postgresql.Driver",
    url = postgresDatabase.url,
    user = "test",
    password = "",
    createSchema = true)

  // DAO over the same database the service under test writes to
  private lazy val dao = dbbackend.ContractDao(
    jdbcDriver = jdbcConfig_.driver,
    jdbcUrl = jdbcConfig_.url,
    username = jdbcConfig_.user,
    password = jdbcConfig_.password
  )

  "query persists all active contracts" in withHttpService { (uri, encoder, _) =>
    searchExpectOk(
      searchDataSet,
      jsObject("""{"templateIds": ["Iou:Iou"], "query": {"currency": "EUR"}}"""),
      uri,
      encoder
    ).flatMap { searchResult: List[domain.ActiveContract[JsValue]] =>
      // two EUR contracts match the query...
      discard { searchResult should have size 2 }
      discard { searchResult.map(getField("currency")) shouldBe List.fill(2)(JsString("EUR")) }
      // ...but every contract of the data set must have been persisted, not just matches
      selectAllDbContracts.flatMap { listFromDb =>
        discard { listFromDb should have size searchDataSet.size.toLong }
        val actualCurrencyValues: List[String] = listFromDb
          .flatMap {
            case (_, _, _, payload, _, _, _) => payload.asJsObject().getFields("currency")
          }
          .collect { case JsString(a) => a }
        val expectedCurrencyValues = List("EUR", "EUR", "GBP", "BTC")
        // the initial create commands submitted asynchronously, we don't know the exact order, that is why sorted
        actualCurrencyValues.sorted shouldBe expectedCurrencyValues.sorted
      }
    }
  }

  /** Raw dump of the contract table, bypassing the HTTP layer entirely. */
  @SuppressWarnings(Array("org.wartremover.warts.Any"))
  private def selectAllDbContracts
    : Future[List[(String, String, JsValue, JsValue, Vector[String], Vector[String], String)]] = {
    import com.daml.http.dbbackend.Queries.Implicits._
    import dao.logHandler
    import doobie.implicits._
    import doobie.postgres.implicits._

    val q =
      sql"""SELECT contract_id, tpid, key, payload, signatories, observers, agreement_text FROM contract"""
        .query[(String, String, JsValue, JsValue, Vector[String], Vector[String], String)]

    dao.transact(q.to(List)).unsafeToFuture()
  }

  /** Extracts exactly one field `k` from a contract payload, failing the test otherwise. */
  private def getField(k: String)(a: domain.ActiveContract[JsValue]): JsValue =
    a.payload.asJsObject().getFields(k) match {
      case Seq(x) => x
      case xs @ _ => fail(s"Expected exactly one value, got: $xs")
    }
}
Example 20
Source File: JsonVariant.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.lf.value.json import spray.json.{JsObject, JsString, JsValue} object JsonVariant { def apply(tag: String, body: JsValue): JsObject = JsObject("tag" -> JsString(tag), "value" -> body) def unapply(o: JsObject): Option[(String, JsValue)] = (o.fields.size, o.fields.get("tag"), o.fields.get("value")) match { case (2, Some(JsString(tag)), Some(nv)) => Some((tag, nv)) case _ => None } }
Example 21
Source File: Request.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.lf.engine.trigger import com.daml.lf.data.Ref.{DottedName, Identifier, PackageId, QualifiedName} import spray.json.DefaultJsonProtocol._ import spray.json.{JsString, JsValue, JsonFormat, deserializationError} object Request { implicit object IdentifierFormat extends JsonFormat[Identifier] { def read(value: JsValue): Identifier = value match { case JsString(s) => val components = s.split(":") if (components.length == 3) { val parsed = for { pkgId <- PackageId.fromString(components(0)) mod <- DottedName.fromString(components(1)) entity <- DottedName.fromString(components(2)) } yield Identifier(pkgId, QualifiedName(mod, entity)) parsed match { case Left(e) => deserializationError(e) case Right(id) => id } } else { deserializationError(s"Expected trigger identifier of the form pkgid:mod:name but got $s") } case _ => deserializationError("Expected trigger identifier of the form pkgid:mod:name") } def write(id: Identifier): JsValue = JsString(id.toString) } case class StartParams(triggerName: Identifier) implicit val startParamsFormat = jsonFormat1(StartParams) }
Example 22
Source File: SessionJsonProtocolTest.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.navigator import com.daml.navigator.model.PartyState import org.scalatest.{FlatSpec, Matchers} import SessionJsonProtocol.userWriter import com.daml.ledger.api.refinements.ApiTypes import spray.json.{JsBoolean, JsObject, JsString} class SessionJsonProtocolTest extends FlatSpec with Matchers { val userClassName = User.getClass.getSimpleName val party = ApiTypes.Party("party") behavior of s"JsonCodec[$userClassName]" it should s"encode $userClassName without role" in { val user = User(id = "id", party = new PartyState(party, false), canAdvanceTime = true) val userJson = JsObject( "id" -> JsString("id"), "party" -> JsString("party"), "canAdvanceTime" -> JsBoolean(true)) userWriter.write(user) shouldEqual userJson } it should s"encode $userClassName with role" in { val user = User( id = "id", party = new PartyState(party, false), role = Some("role"), canAdvanceTime = false) val userJson = JsObject( "id" -> JsString("id"), "role" -> JsString("role"), "party" -> JsString("party"), "canAdvanceTime" -> JsBoolean(false)) userWriter.write(user) shouldEqual userJson } }
Example 23
Source File: JsonSupport.scala From akka-http-slick-sample with MIT License | 5 votes |
package net.softler.data.model import java.sql.Timestamp import java.time.Instant import java.util.UUID import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport import spray.json.{DefaultJsonProtocol, JsNumber, JsString, JsValue, JsonFormat, RootJsonFormat} trait BaseJsonProtocol extends DefaultJsonProtocol { implicit val timestampFormat: JsonFormat[Timestamp] = new JsonFormat[Timestamp] { override def write(obj: Timestamp): JsValue = JsNumber(obj.getTime) override def read(json: JsValue): Timestamp = json match { case JsNumber(x) => Timestamp.from(Instant.ofEpochMilli(x.toLong)) case _ => throw new IllegalArgumentException( s"Can not parse json value [$json] to a timestamp object") } } implicit val uuidJsonFormat: JsonFormat[UUID] = new JsonFormat[UUID] { override def write(x: UUID): JsValue = JsString(x.toString) override def read(value: JsValue): UUID = value match { case JsString(x) => UUID.fromString(x) case x => throw new IllegalArgumentException("Expected UUID as JsString, but got " + x.getClass) } } } trait JsonProtocol extends SprayJsonSupport with BaseJsonProtocol { implicit val userFormat: RootJsonFormat[User] = jsonFormat10(User) }
Example 24
Source File: HydraKafkaJsonSupport.scala From hydra with Apache License 2.0 | 5 votes |
package hydra.kafka.marshallers import akka.http.scaladsl.marshalling.{Marshaller, Marshalling} import akka.http.scaladsl.model.ContentTypes import akka.util.ByteString import hydra.core.marshallers.HydraJsonSupport import org.apache.kafka.common.{Node, PartitionInfo} import spray.json.{JsNumber, JsObject, JsString, JsValue, JsonFormat} import scala.concurrent.Future trait HydraKafkaJsonSupport extends HydraJsonSupport { implicit object NodeJsonFormat extends JsonFormat[Node] { override def write(node: Node): JsValue = { JsObject( "id" -> JsNumber(node.idString), "host" -> JsString(node.host), "port" -> JsNumber(node.port) ) } override def read(json: JsValue): Node = { json.asJsObject.getFields("id", "host", "port") match { case Seq(id, host, port) => new Node( id.convertTo[Int], host.convertTo[String], port.convertTo[Int] ) case other => spray.json.deserializationError( "Cannot deserialize Node. Invalid input: " + other ) } } } implicit object PartitionInfoJsonFormat extends JsonFormat[PartitionInfo] { import spray.json._ override def write(p: PartitionInfo): JsValue = { JsObject( "partition" -> JsNumber(p.partition()), "leader" -> p.leader().toJson, "isr" -> JsArray(p.inSyncReplicas().toJson) ) } override def read(json: JsValue): PartitionInfo = ??? } implicit val stringFormat = Marshaller[String, ByteString] { ec ⇒ s => Future.successful { List( Marshalling.WithFixedContentType( ContentTypes.`application/json`, () => ByteString(s) ) ) } } }
Example 25
Source File: HydraJsonSupport.scala From hydra with Apache License 2.0 | 5 votes |
package hydra.core.marshallers

import java.io.{PrintWriter, StringWriter}
import java.util.UUID

import akka.actor.ActorPath
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import akka.http.scaladsl.model.StatusCode
import hydra.common.util.Resource._
import org.joda.time.DateTime
import org.joda.time.format.ISODateTimeFormat
import spray.json.{JsString, _}

import scala.util.{Failure, Success, Try}

// NOTE(review): this chunk begins inside the enclosing trait (presumably HydraJsonSupport) —
// its declaration is outside this view, and `t.toJson` below relies on a Throwable writer
// defined elsewhere in it.

  /** Encodes Try results as {"success": ...} / {"failure": ...} envelopes. */
  implicit def tryWriter[R: JsonWriter]: RootJsonWriter[Try[R]] =
    new RootJsonWriter[Try[R]] {

      override def write(responseTry: Try[R]): JsValue = {
        responseTry match {
          case Success(r) => JsObject("success" -> r.toJson)
          case Failure(t) => JsObject("failure" -> t.toJson)
        }
      }
    }

  /** Maps the StreamType ADT to/from its simple-name JSON string. */
  implicit object StreamTypeFormat extends RootJsonFormat[StreamType] {
    def read(json: JsValue): StreamType = json match {
      case JsString("Notification") => Notification
      case JsString("History") => History
      case JsString("CurrentState") => CurrentState
      case JsString("Telemetry") => Telemetry
      case _ => {
        // reflect over the sealed trait so the error message lists every legal value
        import scala.reflect.runtime.{universe => ru}
        val tpe = ru.typeOf[StreamType]
        val clazz = tpe.typeSymbol.asClass
        throw new DeserializationException(
          s"expected a streamType of ${clazz.knownDirectSubclasses}, but got $json"
        )
      }
    }

    def write(obj: StreamType): JsValue = {
      JsString(obj.toString)
    }
  }

  implicit val genericErrorFormat = jsonFormat2(GenericError)

  implicit val topicCreationMetadataFormat = jsonFormat8(TopicMetadataRequest)

  implicit val genericSchemaFormat = jsonFormat2(GenericSchema)
}

case class GenericError(status: Int, errorMessage: String)

case class TopicMetadataRequest(
    schema: JsObject,
    streamType: StreamType,
    derived: Boolean,
    deprecated: Option[Boolean],
    dataClassification: String,
    contact: String,
    additionalDocumentation: Option[String],
    notes: Option[String]
)

// name/namespace pair identifying a schema; subject follows the "<namespace>.<name>" convention
case class GenericSchema(name: String, namespace: String) {
  def subject = s"$namespace.$name"
}

sealed trait StreamType

case object Notification extends StreamType

case object CurrentState extends StreamType

case object History extends StreamType

case object Telemetry extends StreamType
Example 26
Source File: ClickhouseJsonSupport.scala From clickhouse-scala-client with GNU Lesser General Public License v3.0 | 5 votes |
package com.crobox.clickhouse.dsl.marshalling

import com.crobox.clickhouse.time.IntervalStart
import org.joda.time.format.{DateTimeFormatter, DateTimeFormatterBuilder, ISODateTimeFormat}
import org.joda.time.{DateTime, DateTimeZone}
import spray.json.{JsNumber, JsString, JsValue, JsonFormat, deserializationError, _}

import scala.util.Try

// NOTE(review): only part of this trait is visible in this chunk — the regex extractors
// (month, date, msTimestamp, timestamp), the formatters and the Unix-start constants
// referenced below, plus the opening of an inner format object, were elided; the brace
// structure below is therefore preserved exactly as found.
trait ClickhouseJsonSupport {

  /** Parses the various shapes clickhouse reports an interval start in into a UTC DateTime. */
  override def read(json: JsValue): IntervalStart =
    json match {
      case JsString(value) =>
        value match {
          // "<relativeMonth>_<timezone>": months counted since the Unix epoch
          case month(relativeMonth, timezoneId) =>
            new DateTime(UnixStartTimeWithoutTimeZone)
              .withZoneRetainFields(DateTimeZone.forID(timezoneId))
              .plusMonths(relativeMonth.toInt - RelativeMonthsSinceUnixStart)
              .withZone(DateTimeZone.UTC)
          case date(dateOnly, timezoneId) =>
            //should handle quarter and year grouping as it returns a date
            formatter
              .parseDateTime(dateOnly)
              .withZoneRetainFields(DateTimeZone.forID(timezoneId))
              .withZone(DateTimeZone.UTC)
          case msTimestamp(millis) => new DateTime(millis.toLong, DateTimeZone.UTC)
          case timestamp(secs) => new DateTime(secs.toLong * 1000, DateTimeZone.UTC)
          case _ =>
            // sometimes clickhouse mistakenly returns a long / int value as JsString. Therefor, first try to
            // parse it as a long...
            val dateTime = Try {
              new DateTime(value.toLong, DateTimeZone.UTC)
            }.toOption

            // continue with parsing using the formatter
            dateTime.getOrElse {
              try {
                formatter.parseDateTime(value)
              } catch {
                case _: IllegalArgumentException => error(s"Couldn't parse $value into valid date time")
                case _: UnsupportedOperationException => error("Unsupported operation, programmatic misconfiguration?")
              }
            }
        }
      case JsNumber(millis) => new DateTime(millis.longValue, DateTimeZone.UTC)
      case _ => throw DeserializationException(s"Unknown date format read from clickhouse for $json")
    }

  /** Raises a DeserializationException whose message includes a valid example value. */
  def error(v: Any): DateTime = {
    val example = readFormatter.print(0)
    deserializationError(
      f"'$v' is not a valid date value. Dates must be in compact ISO-8601 format, e.g. '$example'"
    )
  }
  // closes an enclosing format object whose opening brace was elided from this chunk
  }
}

object ClickhouseJsonSupport extends DefaultJsonProtocol with ClickhouseJsonSupport
Example 27
Source File: ClickhouseIntervalStartFormatTest.scala From clickhouse-scala-client with GNU Lesser General Public License v3.0 | 5 votes |
package com.crobox.clickhouse.dsl.marshalling import com.crobox.clickhouse.dsl.marshalling.ClickhouseJsonSupport.ClickhouseIntervalStartFormat import org.joda.time.{DateTime, DateTimeZone} import spray.json.{JsNumber, JsString} import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers class ClickhouseIntervalStartFormatTest extends AnyFlatSpec with Matchers { val zone = DateTimeZone.forID("Europe/Bucharest") it should "read using month relative" in { ClickhouseIntervalStartFormat.read( JsString(s"${ClickhouseIntervalStartFormat.RelativeMonthsSinceUnixStart + 3}_$zone") ) should be(new DateTime("1970-04-01T00:00:00.000+02:00", DateTimeZone.UTC)) } it should "read using 0 as JsString" in { ClickhouseIntervalStartFormat.read(JsString("0")) should be( new DateTime("1970-01-01T00:00:00.000+00:00", DateTimeZone.UTC) ) } it should "read using 0 as JsNumber" in { ClickhouseIntervalStartFormat.read(JsNumber(0)) should be( new DateTime("1970-01-01T00:00:00.000+00:00", DateTimeZone.UTC) ) } it should "read date only" in { ClickhouseIntervalStartFormat.read(JsString(s"1970-12-17_$zone")) should be( new DateTime("1970-12-17T00:00:00.000+02:00", DateTimeZone.UTC) ) } it should "read timestamp" in { val date = DateTime.now(DateTimeZone.UTC) ClickhouseIntervalStartFormat.read(JsString(s"${date.getMillis}")) should be(date) ClickhouseIntervalStartFormat.read(JsNumber(date.getMillis)) should be(date) } }
Example 28
Source File: Side.scala From scalanda with MIT License | 5 votes |
package com.msilb.scalanda.common.model import spray.json.{JsString, JsValue, JsonFormat, deserializationError} sealed trait Side object Side { case object Buy extends Side { override def toString = "buy" } case object Sell extends Side { override def toString = "sell" } implicit object SideJsonFormat extends JsonFormat[Side] { def write(x: Side): JsValue = JsString(x.toString) def read(value: JsValue): Side = value match { case JsString(x) => x match { case "buy" => Buy case "sell" => Sell } case x => deserializationError("Expected Side as JsString, but got " + x) } } }
Example 29
Source File: JsonSupport.scala From darwin with Apache License 2.0 | 5 votes |
package it.agilelab.darwin.server.rest import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport import org.apache.avro.Schema import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, JsonParser, PrettyPrinter, RootJsonFormat} trait JsonSupport extends SprayJsonSupport with DefaultJsonProtocol { implicit val printer: PrettyPrinter.type = PrettyPrinter implicit val schemaFormat: RootJsonFormat[Schema] = new RootJsonFormat[Schema] { override def write(obj: Schema): JsValue = JsonParser(obj.toString(true)) override def read(json: JsValue): Schema = new Schema.Parser().parse(json.prettyPrint) } implicit val schemaWithIdFormat: RootJsonFormat[(Long, Schema)] = new RootJsonFormat[(Long, Schema)] { override def write(obj: (Long, Schema)): JsValue = JsObject(Map( "id" -> JsString(obj._1.toString), "schema" -> schemaFormat.write(obj._2) )) override def read(json: JsValue): (Long, Schema) = json match { case JsObject(fields) => val id = fields.get("id") match { case Some(JsString(number)) => number case _ => throw new Exception("Id field should be a long") } val schema = fields.get("schema") match { case Some(x@JsObject(_)) => x case _ => throw new Exception("schema should be an object") } (id.toLong, schemaFormat.read(schema)) case _ => throw new Exception("should be an object") } } }
Example 30
Source File: Unmarshallers.scala From JustinDB with Apache License 2.0 | 5 votes |
package justin.httpapi import java.util.UUID import akka.http.scaladsl.unmarshalling._ import akka.stream.Materializer import spray.json.{JsString, JsValue, JsonFormat, _} import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success, Try} object Unmarshallers { implicit val UuidFormat = new JsonFormat[UUID] { override def read(json: JsValue): UUID = { json match { case JsString(uuid) => Try(UUID.fromString(uuid)) match { case Success(parsedUuid) => parsedUuid case Failure(_) => deserializationError("UUID could not be created from given string") } case _ => deserializationError("UUID could not be converted to UUID object.") } } override def write(obj: UUID): JsValue = JsString(obj.toString) } object UUIDUnmarshaller extends FromStringUnmarshaller[UUID] { override def apply(value: String)(implicit ec: ExecutionContext, materializer: Materializer): Future[UUID] = { Future.apply(UUID.fromString(value)) } } }
Example 31
Source File: UnmarshallersTest.scala From JustinDB with Apache License 2.0 | 5 votes |
package justin.httpapi import java.util.UUID import org.scalatest.{FlatSpec, Matchers} import spray.json.{DeserializationException, JsNumber, JsString} class UnmarshallersTest extends FlatSpec with Matchers { behavior of "Unmarshaller" it should "encode JSON into UUID" in { val uuid = UUID.randomUUID() val jsString = JsString(uuid.toString) Unmarshallers.UuidFormat.read(jsString) shouldBe uuid } it should "decode UUID into JSON" in { val uuid = UUID.randomUUID() val expectedJSON = Unmarshallers.UuidFormat.write(uuid) expectedJSON shouldBe JsString(uuid.toString) } it should "handle not expected format of JSON" in { val jsNumber = JsNumber(1) intercept[DeserializationException] { Unmarshallers.UuidFormat.read(jsNumber) } } it should "handle wrong format of UUID" in { val fakeUUID = "1-2-3-4" val jsString = JsString(fakeUUID) intercept[DeserializationException] { Unmarshallers.UuidFormat.read(jsString) } } }
Example 32
Source File: Boot.scala From reactive-consul with MIT License | 5 votes |
package stormlantern.consul.example import java.net.URL import akka.actor.ActorSystem import akka.io.IO import akka.pattern._ import akka.util.Timeout import spray.can.Http import spray.json.{ JsString, JsObject } import stormlantern.consul.client.discovery.{ ConnectionStrategy, ServiceDefinition, ConnectionProvider } import stormlantern.consul.client.loadbalancers.RoundRobinLoadBalancer import stormlantern.consul.client.ServiceBroker import stormlantern.consul.client.DNS import scala.concurrent.Future import scala.concurrent.duration._ object Boot extends App { implicit val system = ActorSystem("reactive-consul") implicit val executionContext = system.dispatcher val service = system.actorOf(ReactiveConsulHttpServiceActor.props(), "webservice") implicit val timeout = Timeout(5.seconds) IO(Http) ? Http.Bind(service, interface = "0.0.0.0", port = 8080) def connectionProviderFactory = (host: String, port: Int) ⇒ new ConnectionProvider { val client = new SprayExampleServiceClient(new URL(s"http://$host:$port")) override def getConnection: Future[Any] = Future.successful(client) } val connectionStrategy1 = ConnectionStrategy("example-service-1", connectionProviderFactory) val connectionStrategy2 = ConnectionStrategy("example-service-2", connectionProviderFactory) val services = Set(connectionStrategy1, connectionStrategy2) val serviceBroker = ServiceBroker(DNS.lookup("consul-8500.service.consul"), services) system.scheduler.schedule(5.seconds, 5.seconds) { serviceBroker.withService("example-service-1") { client: SprayExampleServiceClient ⇒ client.identify }.foreach(println) serviceBroker.withService("example-service-2") { client: SprayExampleServiceClient ⇒ client.identify }.foreach(println) } }
Example 33
Source File: ControllerInstanceIdTests.scala From openwhisk with Apache License 2.0 | 5 votes |
package org.apache.openwhisk.core.entity.test import org.junit.runner.RunWith import org.scalatest.FlatSpec import org.scalatest.Matchers import org.scalatest.junit.JUnitRunner import org.apache.openwhisk.core.entity.{ControllerInstanceId, InstanceId} import spray.json.{JsObject, JsString} import scala.util.Success @RunWith(classOf[JUnitRunner]) class ControllerInstanceIdTests extends FlatSpec with Matchers { behavior of "ControllerInstanceId" it should "accept usable characters" in { Seq("a", "1", "a.1", "a_1").foreach { s => ControllerInstanceId(s).asString shouldBe s } } it should "reject unusable characters" in { Seq(" ", "!", "$", "a" * 129).foreach { s => an[IllegalArgumentException] shouldBe thrownBy { ControllerInstanceId(s) } } } it should "serialize and deserialize ControllerInstanceId" in { val i = ControllerInstanceId("controller0") i.serialize shouldBe JsObject("asString" -> JsString(i.asString), "instanceType" -> JsString(i.instanceType)).compactPrint i.serialize shouldBe i.toJson.compactPrint InstanceId.parse(i.serialize) shouldBe Success(i) } }
Example 34
Source File: InvokerInstanceIdTests.scala From openwhisk with Apache License 2.0 | 5 votes |
package org.apache.openwhisk.core.entity.test import org.apache.openwhisk.core.entity.size.SizeInt import org.apache.openwhisk.core.entity.{ByteSize, InstanceId, InvokerInstanceId} import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner import org.scalatest.{FlatSpec, Matchers} import spray.json.{JsNumber, JsObject, JsString} import scala.util.Success @RunWith(classOf[JUnitRunner]) class InvokerInstanceIdTests extends FlatSpec with Matchers { behavior of "InvokerInstanceIdTests" val defaultUserMemory: ByteSize = 1024.MB it should "serialize and deserialize InvokerInstanceId" in { val i = InvokerInstanceId(0, userMemory = defaultUserMemory) i.serialize shouldBe JsObject( "instance" -> JsNumber(i.instance), "userMemory" -> JsString(i.userMemory.toString), "instanceType" -> JsString(i.instanceType)).compactPrint i.serialize shouldBe i.toJson.compactPrint InstanceId.parse(i.serialize) shouldBe Success(i) } it should "serialize and deserialize InvokerInstanceId with optional field" in { val i1 = InvokerInstanceId(0, uniqueName = Some("uniqueInvoker"), userMemory = defaultUserMemory) i1.serialize shouldBe JsObject( "instance" -> JsNumber(i1.instance), "userMemory" -> JsString(i1.userMemory.toString), "instanceType" -> JsString(i1.instanceType), "uniqueName" -> JsString(i1.uniqueName.getOrElse(""))).compactPrint i1.serialize shouldBe i1.toJson.compactPrint InstanceId.parse(i1.serialize) shouldBe Success(i1) val i2 = InvokerInstanceId( 0, uniqueName = Some("uniqueInvoker"), displayedName = Some("displayedInvoker"), userMemory = defaultUserMemory) i2.serialize shouldBe JsObject( "instance" -> JsNumber(i2.instance), "userMemory" -> JsString(i2.userMemory.toString), "instanceType" -> JsString(i2.instanceType), "uniqueName" -> JsString(i2.uniqueName.getOrElse("")), "displayedName" -> JsString(i2.displayedName.getOrElse(""))).compactPrint i2.serialize shouldBe i2.toJson.compactPrint InstanceId.parse(i2.serialize) shouldBe Success(i2) } }
Example 35
Source File: ElasticSearchActivationStoreBehaviorBase.scala From openwhisk with Apache License 2.0 | 5 votes |
package org.apache.openwhisk.core.database.elasticsearch

import org.scalatest.FlatSpec
import org.apache.openwhisk.core.controller.test.WhiskAuthHelpers
import org.apache.openwhisk.core.database.UserContext
import org.apache.openwhisk.core.database.test.behavior.ActivationStoreBehaviorBase
import org.apache.openwhisk.core.entity.{ActivationResponse, Parameters, WhiskActivation}
import org.testcontainers.elasticsearch.ElasticsearchContainer
import pureconfig.loadConfigOrThrow
import spray.json.{JsObject, JsString}

/**
 * Wires the shared [[ActivationStoreBehaviorBase]] test suite to an
 * ElasticSearch-backed activation store running in a Testcontainers container.
 *
 * NOTE: the container is started eagerly in the trait body (i.e. during
 * construction of the mixing test class) and closed again in `afterAll`.
 */
trait ElasticSearchActivationStoreBehaviorBase extends FlatSpec with ActivationStoreBehaviorBase {
  // Docker image to run; resolved from test configuration, fails fast if absent.
  val imageName = loadConfigOrThrow[String]("whisk.elasticsearch.docker-image")
  val container = new ElasticsearchContainer(imageName)
  container.start()

  // Tear the container down after the suite, then run the inherited cleanup.
  override def afterAll = {
    container.close()
    super.afterAll()
  }

  override def storeType = "ElasticSearch"

  // Fresh identity per suite; all operations run in this user's context.
  val creds = WhiskAuthHelpers.newIdentity()
  override val context = UserContext(creds)

  // Lazy so the container is guaranteed to be up before the store connects.
  // Credentials are fake because the test container runs without auth.
  override lazy val activationStore = {
    val storeConfig =
      ElasticSearchActivationStoreConfig("http", container.getHttpHostAddress, "unittest-%s", "fake", "fake")
    new ElasticSearchActivationStore(None, storeConfig, true)
  }

  // add result and annotations
  override def newActivation(ns: String, actionName: String, start: Long): WhiskActivation = {
    super
      .newActivation(ns, actionName, start)
      .copy(
        response = ActivationResponse.success(Some(JsObject("name" -> JsString("whisker")))),
        annotations = Parameters("database", "elasticsearch") ++ Parameters("type", "test"))
  }
}
Example 36
Source File: Subject.scala From openwhisk with Apache License 2.0 | 5 votes |
package org.apache.openwhisk.core.entity

import scala.util.Try

import spray.json.JsString
import spray.json.JsValue
import spray.json.RootJsonFormat
import spray.json.deserializationError

/**
 * A subject (user identity). Modeled as a value class around the raw subject
 * string so it adds no allocation overhead at runtime.
 */
protected[core] class Subject private (private val subject: String) extends AnyVal {

  /** Returns the underlying subject string; named to make the conversion explicit. */
  protected[core] def asString = subject // to make explicit that this is a string conversion

  /** JSON wire form of the subject. */
  protected[entity] def toJson = JsString(subject)

  override def toString = subject
}

protected[core] object Subject extends ArgNormalizer[Subject] {

  /** Creates a random, anonymous subject of the form `anon-<27 alphanumerics>`. */
  protected[core] def apply(): Subject =
    Subject(rand.alphanumeric.take(27).mkString("anon-", "", ""))

  override protected[core] implicit val serdes = new RootJsonFormat[Subject] {
    def write(s: Subject) = s.toJson

    // Both a non-string JSON value and a subject string rejected by apply()
    // fail inside the Try and surface as the same deserialization error.
    def read(value: JsValue) =
      Try {
        value match { case JsString(s) => Subject(s) }
      } getOrElse deserializationError("subject malformed")
  }

  private val rand = new scala.util.Random()
}
Example 37
Source File: SemVer.scala From openwhisk with Apache License 2.0 | 5 votes |
package org.apache.openwhisk.core.entity

import spray.json.deserializationError
import spray.json.JsString
import spray.json.JsValue
import spray.json.RootJsonFormat

import scala.util.Try

// NOTE(review): the enclosing `object SemVer ...` declaration (and the SemVer
// case class it accompanies) appears to have been lost when this snippet was
// extracted — the members below cannot compile at top level and the trailing
// `}` closes the missing object. Restore the object header before use.

  /**
   * Parses a semantic version from a string of the form `major.minor.patch`.
   * Missing components default to 0 (so "", "1", "1.2" are all accepted).
   *
   * @throws IllegalArgumentException if any present component is not numeric
   */
  protected[entity] def apply(str: String): SemVer = {
    try {
      // null/empty input yields an empty array, i.e. version 0.0.0
      val parts = if (str != null && str.nonEmpty) str.split('.') else Array[String]()
      val major = if (parts.size >= 1) parts(0).toInt else 0
      val minor = if (parts.size >= 2) parts(1).toInt else 0
      val patch = if (parts.size >= 3) parts(2).toInt else 0
      SemVer(major, minor, patch)
    } catch {
      // normalize any parse failure into a single, descriptive exception
      case _: Throwable => throw new IllegalArgumentException(s"bad semantic version $str")
    }
  }

  implicit val serdes = new RootJsonFormat[SemVer] {
    def write(v: SemVer) = v.toJson

    // Non-string JSON or a malformed version string both fail inside the Try
    // and are reported as a deserialization error.
    def read(value: JsValue) =
      Try {
        val JsString(v) = value
        SemVer(v)
      } getOrElse deserializationError("semantic version malformed")
  }
}
Example 38
Source File: Privilege.scala From openwhisk with Apache License 2.0 | 5 votes |
package org.apache.openwhisk.core.entitlement

import scala.util.Try

import spray.json.DeserializationException
import spray.json.JsString
import spray.json.JsValue
import spray.json.RootJsonFormat

/** The kinds of operations a subject may be entitled to perform on a resource. */
sealed trait Privilege

protected[core] object Privilege extends Enumeration {

  case object READ extends Privilege
  case object PUT extends Privilege
  case object DELETE extends Privilege
  case object ACTIVATE extends Privilege
  case object REJECT extends Privilege

  // basic create/read/update/delete rights, and the full set including activation
  val CRUD: Set[Privilege] = Set(READ, PUT, DELETE)
  val ALL: Set[Privilege] = CRUD + ACTIVATE

  /** Resolves a privilege from its exact name (throws MatchError for unknown names). */
  def fromName(name: String) = name match {
    case "READ"     => READ
    case "PUT"      => PUT
    case "DELETE"   => DELETE
    case "ACTIVATE" => ACTIVATE
    case "REJECT"   => REJECT
  }

  implicit val serdes = new RootJsonFormat[Privilege] {
    def write(p: Privilege) = JsString(p.toString)

    // A non-string JSON value and an unknown privilege name both fail inside
    // the Try and are reported as a single deserialization error.
    def read(json: JsValue) =
      Try {
        json match { case JsString(str) => Privilege.fromName(str.trim.toUpperCase) }
      }.getOrElse {
        throw new DeserializationException("Privilege must be a valid string")
      }
  }
}
Example 39
Source File: YARNComponentActor.scala From openwhisk with Apache License 2.0 | 5 votes |
package org.apache.openwhisk.core.yarn

import akka.actor.{Actor, ActorSystem}
import akka.http.scaladsl.model.{HttpMethods, StatusCodes}
import akka.stream.ActorMaterializer
import org.apache.openwhisk.common.Logging
import org.apache.openwhisk.core.entity.ExecManifest.ImageName
import org.apache.openwhisk.core.yarn.YARNComponentActor.{CreateContainerAsync, RemoveContainer}
import spray.json.{JsArray, JsNumber, JsObject, JsString}

import scala.concurrent.ExecutionContext

/** Protocol messages understood by [[YARNComponentActor]]. */
object YARNComponentActor {
  case object CreateContainerAsync
  case class RemoveContainer(component_instance_name: String)
}

/**
 * Manages the container pool of a single YARN service component by flexing it
 * up and down through the YARN REST API.
 */
class YARNComponentActor(actorSystem: ActorSystem,
                         logging: Logging,
                         yarnConfig: YARNConfig,
                         serviceName: String,
                         imageName: ImageName)
    extends Actor {

  implicit val as: ActorSystem = actorSystem
  implicit val materializer: ActorMaterializer = ActorMaterializer()
  implicit val ec: ExecutionContext = actorSystem.dispatcher

  //Adding a container via the YARN REST API is actually done by flexing the component's container pool to a certain size.
  // This actor must track the current containerCount in order to make the correct scale-up request.
  var containerCount: Int = 0

  def receive: PartialFunction[Any, Unit] = {
    case CreateContainerAsync =>
      sender ! createContainerAsync
    case RemoveContainer(component_instance_name) =>
      sender ! removeContainer(component_instance_name)
    case input =>
      // BUG FIX: the original threw first and then executed `sender ! false`,
      // which was unreachable dead code — the asker never received a reply.
      // Reply before raising so the caller is not left waiting.
      sender ! false
      throw new IllegalArgumentException("Unknown input: " + input)
  }

  /** Flexes the component pool up by one container; updates the local count on success. */
  def createContainerAsync(): Unit = {
    logging.info(this, s"Using YARN to create a container with image ${imageName.name}...")

    val body = JsObject("number_of_containers" -> JsNumber(containerCount + 1)).compactPrint
    val response = YARNRESTUtil.submitRequestWithAuth(
      yarnConfig.authType,
      HttpMethods.PUT,
      s"${yarnConfig.masterUrl}/app/v1/services/$serviceName/components/${imageName.name}",
      body)
    response match {
      case httpresponse(StatusCodes.OK, content) =>
        logging.info(this, s"Added container: ${imageName.name}. Response: $content")
        containerCount += 1
      case httpresponse(_, _) => YARNRESTUtil.handleYARNRESTError(logging)
    }
  }

  /** Decommissions one named container instance; updates the local count on success. */
  def removeContainer(component_instance_name: String): Unit = {
    logging.info(this, s"Removing ${imageName.name} container: $component_instance_name ")

    if (containerCount <= 0) {
      // nothing to remove; guard against driving the count negative
      logging.warn(this, "Already at 0 containers")
    } else {
      val body = JsObject(
        "components" -> JsArray(
          JsObject(
            "name" -> JsString(imageName.name),
            "decommissioned_instances" -> JsArray(JsString(component_instance_name))))).compactPrint
      val response = YARNRESTUtil.submitRequestWithAuth(
        yarnConfig.authType,
        HttpMethods.PUT,
        s"${yarnConfig.masterUrl}/app/v1/services/$serviceName",
        body)
      response match {
        case httpresponse(StatusCodes.OK, content) =>
          logging.info(
            this,
            s"Successfully removed ${imageName.name} container: $component_instance_name. Response: $content")
          containerCount -= 1
        case httpresponse(_, _) => YARNRESTUtil.handleYARNRESTError(logging)
      }
    }
  }
}
Example 40
Source File: StandardFormatsSpec.scala From sjson-new with Apache License 2.0 | 5 votes |
package sjsonnew
package support.spray

import spray.json.{ JsValue, JsNumber, JsString, JsNull, JsTrue, JsFalse, JsObject }
import org.specs2.mutable._
import scala.Right

/** Specs for the standard Option/Either formats of the spray-json backend. */
class StandardFormatsSpec extends Specification with BasicJsonProtocol {
  // Sample type with optional fields, to exercise None-field omission.
  case class Person(name: Option[String], value: Option[Int])

  implicit object PersonFormat extends JsonFormat[Person] {
    def write[J](x: Person, builder: Builder[J]): Unit = {
      builder.beginObject()
      builder.addField("name", x.name)
      builder.addField("value", x.value)
      builder.endObject()
    }
    def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Person =
      jsOpt match {
        case Some(js) =>
          unbuilder.beginObject(js)
          val name = unbuilder.readField[Option[String]]("name")
          val value = unbuilder.readField[Option[Int]]("value")
          unbuilder.endObject()
          Person(name, value)
        case None => deserializationError("Expected JsObject but found None")
      }
  }

  "The optionFormat" should {
    "convert None to JsNull" in {
      Converter.toJsonUnsafe(None.asInstanceOf[Option[Int]]) mustEqual JsNull
    }
    "convert JsNull to None" in {
      Converter.fromJsonUnsafe[Option[Int]](JsNull) mustEqual None
    }
    "convert Some(Hello) to JsString(Hello)" in {
      Converter.toJsonUnsafe(Some("Hello").asInstanceOf[Option[String]]) mustEqual JsString("Hello")
    }
    "convert JsString(Hello) to Some(Hello)" in {
      Converter.fromJsonUnsafe[Option[String]](JsString("Hello")) mustEqual Some("Hello")
    }
    // None-valued fields are dropped entirely rather than written as null
    "omit None fields" in {
      Converter.toJsonUnsafe(Person(None, None)) mustEqual JsObject()
    }
  }

  "The eitherFormat" should {
    val a: Either[Int, String] = Left(42)
    val b: Either[Int, String] = Right("Hello")
    // Either is unwrapped: the JSON carries the payload, not a wrapper object
    "convert the left side of an Either value to Json" in {
      Converter.toJsonUnsafe(a) mustEqual JsNumber(42)
    }
    "convert the right side of an Either value to Json" in {
      Converter.toJsonUnsafe(b) mustEqual JsString("Hello")
    }
    "convert the left side of an Either value from Json" in {
      Converter.fromJsonUnsafe[Either[Int, String]](JsNumber(42)) mustEqual Left(42)
    }
    "convert the right side of an Either value from Json" in {
      Converter.fromJsonUnsafe[Either[Int, String]](JsString("Hello")) mustEqual Right("Hello")
    }
  }
}
Example 41
Source File: LListFormatSpec.scala From sjson-new with Apache License 2.0 | 5 votes |
package sjsonnew
package support.spray

import org.specs2.mutable._
import java.util.Arrays
import spray.json.{ JsArray, JsNumber, JsString, JsObject }

/**
 * Specs for the labeled-list (LList) format: each LList serializes to a
 * JsObject carrying a "$fields" array that preserves field order.
 */
class LListFormatsSpec extends Specification with BasicJsonProtocol {

  "The llistFormat" should {
    val empty = LNil
    val emptyObject = JsObject()
    val list = ("Z", 2) :*: ("a", 1) :*: LNil
    val obj = JsObject("$fields" -> JsArray(JsString("Z"), JsString("a")), "Z" -> JsNumber(2), "a" -> JsNumber(1))
    // an LList nested as a field value becomes a nested JsObject
    val nested = ("b", list) :*: LNil
    val nestedObj = JsObject("$fields" -> JsArray(JsString("b")), "b" -> obj)

    "convert an empty list to JObject" in {
      Converter.toJsonUnsafe(empty) mustEqual emptyObject
    }
    "convert a list to JObject" in {
      Converter.toJsonUnsafe(list) mustEqual obj
    }
    "convert a nested list to JObject" in {
      Converter.toJsonUnsafe(nested) mustEqual nestedObj
    }
    "convert a JObject to list" in {
      Converter.fromJsonUnsafe[Int :*: Int :*: LNil](obj) mustEqual list
    }
    "convert a nested JObject to list" in {
      Converter.fromJsonUnsafe[(Int :*: Int :*: LNil) :*: LNil](nestedObj) mustEqual nested
    }

    val obj2 = JsObject("$fields" -> JsArray(JsString("f")), "f" -> JsString("foo"))
    val nested2Obj = JsObject("$fields" -> JsArray(JsString("b"), JsString("c")), "b" -> obj, "c" -> obj2)
    val list2 = ("f", "foo") :*: LNil
    val nested2 = ("b", list) :*: ("c", list2) :*: LNil

    "convert a 2 nested JObjects to list" in {
      Converter.fromJsonUnsafe[(Int :*: Int :*: LNil) :*: (String :*: LNil) :*: LNil](nested2Obj) mustEqual nested2
    }
  }
}
Example 42
Source File: JavaPrimitiveSpec.scala From sjson-new with Apache License 2.0 | 5 votes |
package sjsonnew
package support.spray

import spray.json.{ JsValue, JsNumber, JsString, JsNull, JsTrue, JsFalse, JsObject }
import org.specs2.mutable._
import java.lang.{ Integer => JInteger, Long => JLong, Boolean => JBoolean, Float => JFloat, Double => JDouble, Byte => JByte, Short => JShort, Character => JCharacter }

/**
 * Round-trip specs for the boxed Java primitive formats: every boxed numeric
 * maps to JsNumber, JBoolean to JsTrue/JsFalse, and JCharacter to a
 * single-character JsString.
 */
class JavaPrimitiveFormatsSpec extends Specification with BasicJsonProtocol {
  "The JIntegerJsonFormat" should {
    "convert an JInteger to a JsNumber" in {
      Converter.toJsonUnsafe[JInteger](42: JInteger) mustEqual JsNumber(42)
    }
    "convert a JsNumber to an Int" in {
      Converter.fromJsonUnsafe[JInteger](JsNumber(42)) mustEqual (42: JInteger)
    }
  }

  "The JLongJsonFormat" should {
    // value is deliberately larger than Int.MaxValue to catch precision loss
    "convert a JLong to a JsNumber" in {
      Converter.toJsonUnsafe[JLong](7563661897011259335L: JLong) mustEqual JsNumber(7563661897011259335L)
    }
    "convert a JsNumber to a JLong" in {
      Converter.fromJsonUnsafe[JLong](JsNumber(7563661897011259335L)) mustEqual (7563661897011259335L: JLong)
    }
  }

  "The JFloatJsonFormat" should {
    "convert a JFloat to a JsNumber" in {
      Converter.toJsonUnsafe[JFloat](4.2f: JFloat) mustEqual JsNumber(4.2f)
    }
    "convert a JsNumber to a JFloat" in {
      Converter.fromJsonUnsafe[JFloat](JsNumber(4.2f)) mustEqual (4.2f: JFloat)
    }
  }

  "The JDoubleJsonFormat" should {
    "convert a JDouble to a JsNumber" in {
      Converter.toJsonUnsafe[JDouble](4.2: JDouble) mustEqual JsNumber(4.2)
    }
    "convert a JsNumber to a JDouble" in {
      Converter.fromJsonUnsafe[JDouble](JsNumber(4.2)) mustEqual (4.2: JDouble)
    }
  }

  "The JByteJsonFormat" should {
    "convert a JByte to a JsNumber" in {
      Converter.toJsonUnsafe[JByte](42.toByte: JByte) mustEqual JsNumber(42)
    }
    "convert a JsNumber to a JByte" in {
      Converter.fromJsonUnsafe[JByte](JsNumber(42)) mustEqual (42.toByte: JByte)
    }
  }

  "The JShortJsonFormat" should {
    "convert a JShort to a JsNumber" in {
      Converter.toJsonUnsafe(42.toShort: JShort) mustEqual JsNumber(42)
    }
    "convert a JsNumber to a JShort" in {
      Converter.fromJsonUnsafe[JShort](JsNumber(42)) mustEqual (42.toShort: JShort)
    }
  }

  "The JBooleanJsonFormat" should {
    "convert true to a JsTrue" in {
      Converter.toJsonUnsafe[JBoolean](true: JBoolean) mustEqual JsTrue
    }
    "convert false to a JsFalse" in {
      Converter.toJsonUnsafe[JBoolean](false: JBoolean) mustEqual JsFalse
    }
    "convert a JsTrue to true" in {
      Converter.fromJsonUnsafe[JBoolean](JsTrue) mustEqual true
    }
    "convert a JsFalse to false" in {
      Converter.fromJsonUnsafe[JBoolean](JsFalse) mustEqual false
    }
  }

  "The JCharacterJsonFormat" should {
    "convert a JCharacter to a JsString" in {
      Converter.toJsonUnsafe[JCharacter]('c': JCharacter) mustEqual JsString("c")
    }
    "convert a JsString to a JCharacter" in {
      Converter.fromJsonUnsafe[JCharacter](JsString("c")) mustEqual ('c': JCharacter)
    }
  }
}
Example 43
Source File: JavaExtraFormatsSpec.scala From sjson-new with Apache License 2.0 | 5 votes |
package sjsonnew
package support.spray

import spray.json.{ JsValue, JsNumber, JsString, JsNull, JsTrue, JsFalse, JsObject }
import org.specs2.mutable._
import java.util.{ UUID, Optional }
import java.net.{ URI, URL }
import java.io.File

/**
 * Specs for the extra java.* formats: UUID/URI/URL/File serialize via their
 * string form, and java.util.Optional behaves like scala.Option.
 */
class JavaExtraFormatsSpec extends Specification with BasicJsonProtocol {
  // Sample type with Optional fields, to exercise empty-field omission.
  case class Person(name: Optional[String], value: Optional[Int])

  implicit object PersonFormat extends JsonFormat[Person] {
    def write[J](x: Person, builder: Builder[J]): Unit = {
      builder.beginObject()
      builder.addField("name", x.name)
      builder.addField("value", x.value)
      builder.endObject()
    }
    def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Person =
      jsOpt match {
        case Some(js) =>
          unbuilder.beginObject(js)
          val name = unbuilder.readField[Optional[String]]("name")
          val value = unbuilder.readField[Optional[Int]]("value")
          unbuilder.endObject()
          Person(name, value)
        case None => deserializationError("Expected JsObject but found None")
      }
  }

  "The uuidStringIso" should {
    val uuid = UUID.fromString("abc220ea-2a01-11e6-b67b-9e71128cae77")
    "convert a UUID to JsString" in {
      Converter.toJsonUnsafe(uuid) mustEqual JsString("abc220ea-2a01-11e6-b67b-9e71128cae77")
    }
    "convert the JsString back to the UUID" in {
      Converter.fromJsonUnsafe[UUID](JsString("abc220ea-2a01-11e6-b67b-9e71128cae77")) mustEqual uuid
    }
  }

  "The uriStringIso" should {
    val uri = new URI("http://localhost")
    "convert a URI to JsString" in {
      Converter.toJsonUnsafe(uri) mustEqual JsString("http://localhost")
    }
    "convert the JsString back to the URI" in {
      Converter.fromJsonUnsafe[URI](JsString("http://localhost")) mustEqual uri
    }
  }

  "The urlStringIso" should {
    val url = new URL("http://localhost")
    "convert a URL to JsString" in {
      Converter.toJsonUnsafe(url) mustEqual JsString("http://localhost")
    }
    "convert the JsString back to the URI" in {
      Converter.fromJsonUnsafe[URL](JsString("http://localhost")) mustEqual url
    }
  }

  "The fileStringIso" should {
    // absolute paths serialize as file: URIs; relative paths stay relative
    val f = new File("/tmp")
    val f2 = new File(new File("src"), "main")
    "convert a File to JsString" in {
      Converter.toJsonUnsafe(f) mustEqual JsString("file:///tmp/")
    }
    "convert a relative path to JsString" in {
      // https://tools.ietf.org/html/rfc3986#section-4.2
      Converter.toJsonUnsafe(f2) mustEqual JsString("src/main")
    }
    "convert the JsString back to the File" in {
      Converter.fromJsonUnsafe[File](JsString("file:///tmp/")) mustEqual f
    }
    "convert the JsString back to the relative path" in {
      Converter.fromJsonUnsafe[File](JsString("src/main")) mustEqual f2
    }
  }

  "The optionalFormat" should {
    "convert Optional.empty to JsNull" in {
      Converter.toJsonUnsafe(Optional.empty[Int]) mustEqual JsNull
    }
    "convert JsNull to None" in {
      Converter.fromJsonUnsafe[Optional[Int]](JsNull) mustEqual Optional.empty[Int]
    }
    "convert Some(Hello) to JsString(Hello)" in {
      Converter.toJsonUnsafe(Optional.of("Hello")) mustEqual JsString("Hello")
    }
    "convert JsString(Hello) to Some(Hello)" in {
      Converter.fromJsonUnsafe[Optional[String]](JsString("Hello")) mustEqual Optional.of("Hello")
    }
    // empty Optionals are dropped entirely rather than written as null
    "omit None fields" in {
      Converter.toJsonUnsafe(Person(Optional.empty[String], Optional.empty[Int])) mustEqual JsObject()
    }
  }
}
Example 44
Source File: BuilderSpec.scala From sjson-new with Apache License 2.0 | 5 votes |
package sjsonnew
package support.spray

import org.specs2.mutable._
import java.util.Arrays
import spray.json.{ JsArray, JsNumber, JsString, JsObject }
import LList._

/** Round-trip spec for a hand-written JsonFormat built with Builder/Unbuilder. */
class BuilderSpec extends Specification with BasicJsonProtocol {
  case class Person(name: String, value: Int)

  implicit object PersonFormat extends JsonFormat[Person] {
    def write[J](x: Person, builder: Builder[J]): Unit = {
      builder.beginObject()
      builder.addField("name", x.name)
      builder.addField("value", x.value)
      builder.endObject()
    }
    def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Person =
      jsOpt match {
        case Some(js) =>
          unbuilder.beginObject(js)
          val name = unbuilder.readField[String]("name")
          val value = unbuilder.readField[Int]("value")
          unbuilder.endObject()
          Person(name, value)
        case None => deserializationError("Expected JsObject but found None")
      }
  }

  "Custom format using builder" should {
    val p1 = Person("Alice", 1)
    val personJs = JsObject("name" -> JsString("Alice"), "value" -> JsNumber(1))
    "convert from value to JObject" in {
      Converter.toJsonUnsafe(p1) mustEqual personJs
    }
    "convert from JObject to the same value" in {
      Converter.fromJsonUnsafe[Person](personJs) mustEqual p1
    }
  }
}
Example 45
Source File: IsoLListFormatSpec.scala From sjson-new with Apache License 2.0 | 5 votes |
package sjsonnew
package support.spray

import spray.json.{ JsArray, JsNumber, JsString, JsObject }
import org.specs2.mutable.Specification

/**
 * Specs for deriving JSON formats from an isomorphism between a case class and
 * an LList, plus a flat union format discriminated by a "$type" field.
 */
class IsoLListFormatSpec extends Specification with BasicJsonProtocol {
  sealed trait Contact
  case class Person(name: String, value: Option[Int]) extends Contact
  case class Organization(name: String, value: Option[Int]) extends Contact

  // Iso between Person and a labeled list; the reverse direction rebuilds the
  // case class by field lookup.
  implicit val personIso: IsoLList.Aux[Person, String :*: Option[Int] :*: LNil] = LList.isoCurried(
    { p: Person => ("name", p.name) :*: ("value", p.value) :*: LNil }) { in =>
    Person(
      in.find[String]("name").get,
      in.find[Option[Int]]("value").flatten)
  }
  implicit val organizationIso: IsoLList.Aux[Organization, String :*: Option[Int] :*: LNil] = LList.isoCurried(
    { o: Organization => ("name", o.name) :*: ("value", o.value) :*: LNil }) { in =>
    Organization(
      in.find[String]("name").get,
      in.find[Option[Int]]("value").flatten)
  }
  // Union of the two Contact variants, discriminated by the "$type" key.
  implicit val ContactFormat: JsonFormat[Contact] = flatUnionFormat2[Contact, Person, Organization]("$type")

  val p1 = Person("Alice", Some(1))
  val personJs = JsObject("$fields" -> JsArray(JsString("name"), JsString("value")),
    "name" -> JsString("Alice"), "value" -> JsNumber(1))
  // None-valued field is omitted from the JSON entirely
  val c1: Contact = Organization("Company", None)
  val contactJs =
    JsObject(
      "$type" -> JsString("Organization"),
      "$fields" -> JsArray(JsString("name"), JsString("value")),
      "name" -> JsString("Company")
    )

  "The isomorphism from a custom type to LList" should {
    "convert from value to JObject" in {
      Converter.toJsonUnsafe(p1) mustEqual personJs
    }
    "convert from JObject to the same value" in {
      Converter.fromJsonUnsafe[Person](personJs) mustEqual p1
    }
    "convert from a union value to JObject" in {
      Converter.toJsonUnsafe(c1) mustEqual contactJs
    }
  }
}
Example 46
Source File: TupleFormatsSpec.scala From sjson-new with Apache License 2.0 | 5 votes |
package sjsonnew
package support.spray

import spray.json.{ JsValue, JsNumber, JsString, JsNull, JsTrue, JsFalse, JsArray }
import org.specs2.mutable._
import scala.Right

/** Specs for the tuple formats: TupleN maps to a JsArray of N elements. */
class TupleFormatsSpec extends Specification with BasicJsonProtocol {

  "The tuple1Format" should {
    // even a 1-tuple is wrapped in a JsArray for consistency with larger arities
    "convert (42) to a JsNumber" in {
      Converter.toJsonUnsafe(Tuple1(42)) mustEqual JsArray(JsNumber(42))
    }
    "be able to convert a JsNumber to a Tuple1[Int]" in {
      Converter.fromJsonUnsafe[Tuple1[Int]](JsArray(JsNumber(42))) mustEqual Tuple1(42)
    }
  }

  "The tuple2Format" should {
    val json = JsArray(JsNumber(42), JsNumber(4.2))
    "convert (42, 4.2) to a JsArray" in {
      Converter.toJsonUnsafe((42, 4.2)) mustEqual json
    }
    "be able to convert a JsArray to a (Int, Double)]" in {
      Converter.fromJsonUnsafe[(Int, Double)](json) mustEqual (42, 4.2)
    }
  }

  "The tuple3Format" should {
    // heterogeneous element types are preserved positionally
    val json = JsArray(JsNumber(42), JsNumber(4.2), JsString("hello"))
    "convert (42, 4.2, \"hello\") to a JsArray" in {
      Converter.toJsonUnsafe((42, 4.2, "hello")) mustEqual json
    }
    "be able to convert a JsArray to a (Int, Double, Int)]" in {
      Converter.fromJsonUnsafe[(Int, Double, String)](json) mustEqual (42, 4.2, "hello")
    }
  }
}
Example 47
Source File: UnionFormatSpec.scala From sjson-new with Apache License 2.0 | 5 votes |
package sjsonnew
package support.spray

import org.specs2.mutable._
import java.util.Arrays
import spray.json.{ JsArray, JsNumber, JsString, JsObject }
import LList._

/**
 * Specs for ADT serialization: `unionFormat2` wraps the payload in
 * value/type fields, while `flatUnionFormat2` inlines the payload next to the
 * discriminator field.
 */
class UnionFormatsSpec extends Specification with BasicJsonProtocol {
  sealed trait Fruit
  case class Apple() extends Fruit
  sealed trait Citrus extends Fruit
  case class Orange() extends Citrus

  // Apple serializes as { "x": 0 }; any other shape is rejected on read.
  implicit object AppleJsonFormat extends JsonFormat[Apple] {
    def write[J](x: Apple, builder: Builder[J]): Unit = {
      builder.beginObject()
      builder.addField("x", 0)
      builder.endObject()
    }
    def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Apple =
      jsOpt match {
        case Some(js) =>
          val result = unbuilder.beginObject(js) match {
            case 1 =>
              val x = unbuilder.readField[Int]("x")
              if (x == 0) Apple()
              else deserializationError(s"Unexpected value: $x")
            case x => deserializationError(s"Unexpected number of fields: $x")
          }
          unbuilder.endObject()
          result
        case None => deserializationError("Expected JsNumber but found None")
      }
  }

  // Orange serializes as { "x": 1 }; mirror image of the Apple format.
  implicit object OrangeJsonFormat extends JsonFormat[Orange] {
    def write[J](x: Orange, builder: Builder[J]): Unit = {
      builder.beginObject()
      builder.addField("x", 1)
      builder.endObject()
    }
    def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Orange =
      jsOpt match {
        case Some(js) =>
          val result = unbuilder.beginObject(js) match {
            case 1 =>
              val x = unbuilder.readField[Int]("x")
              if (x == 1) Orange()
              else deserializationError(s"Unexpected value: $x")
            case x => deserializationError(s"Unexpected number of fields: $x")
          }
          unbuilder.endObject()
          result
        case None => deserializationError("Expected JsNumber but found None")
      }
  }

  val fruit: Fruit = Apple()

  "The unionFormat" should {
    implicit val FruitFormat: JsonFormat[Fruit] = unionFormat2[Fruit, Apple, Orange]
    // payload nested under "value", discriminator under "type"
    val fruitJson = JsObject("value" -> JsObject("x" -> JsNumber(0)), "type" -> JsString("Apple"))
    "convert a value of ADT to JObject" in {
      Converter.toJsonUnsafe(fruit) mustEqual fruitJson
    }
    "convert JObject back to ADT" in {
      Converter.fromJsonUnsafe[Fruit](fruitJson) mustEqual fruit
    }
  }

  "The flatUnionFormat" should {
    implicit val FruitFormat: JsonFormat[Fruit] = flatUnionFormat2[Fruit, Apple, Orange]("type")
    // payload fields flattened alongside the "type" discriminator
    val fruitJson2 = JsObject("type" -> JsString("Apple"), "x" -> JsNumber(0))
    "convert a value of ADT to JObject" in {
      Converter.toJsonUnsafe(fruit) mustEqual fruitJson2
    }
    "convert JObject back to ADT" in {
      // println(Converter.fromJsonUnsafe[Fruit](fruitJson2))
      Converter.fromJsonUnsafe[Fruit](fruitJson2) mustEqual fruit
    }
  }
}
Example 48
Source File: HttpBootstrapJsonProtocol.scala From akka-management with Apache License 2.0 | 5 votes |
package akka.management.cluster.bootstrap.contactpoint

import akka.actor.{ Address, AddressFromURIString }
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import spray.json.{ DefaultJsonProtocol, JsString, JsValue, RootJsonFormat }

/** Spray-json wire formats for the cluster-bootstrap HTTP contact-point protocol. */
trait HttpBootstrapJsonProtocol extends SprayJsonSupport with DefaultJsonProtocol {
  import HttpBootstrapJsonProtocol._

  // Addresses travel on the wire in their URI string form.
  implicit object AddressFormat extends RootJsonFormat[Address] {
    override def write(obj: Address): JsValue = JsString(obj.toString)

    override def read(json: JsValue): Address = json match {
      case JsString(str) => AddressFromURIString.parse(str)
      case invalid       => throw new IllegalArgumentException(s"Illegal address value! Was [$invalid]")
    }
  }

  // Derived formats; AddressFormat above must already be in implicit scope.
  implicit val SeedNodeFormat: RootJsonFormat[SeedNode] = jsonFormat1(SeedNode)
  implicit val ClusterMemberFormat: RootJsonFormat[ClusterMember] = jsonFormat4(ClusterMember)
  implicit val ClusterMembersFormat: RootJsonFormat[SeedNodes] = jsonFormat2(SeedNodes)
}

object HttpBootstrapJsonProtocol extends DefaultJsonProtocol {

  final case class SeedNode(address: Address)

  // we use Address since we want to know which protocol is being used (tcp, artery, artery-tcp etc)
  final case class ClusterMember(node: Address, nodeUid: Long, status: String, roles: Set[String])

  implicit val clusterMemberOrdering: Ordering[ClusterMember] = Ordering.by(_.node)

  final case class SeedNodes(selfNode: Address, seedNodes: Set[ClusterMember])
}
Example 49
Source File: CodeCommit.scala From cloudformation-template-generator with BSD 3-Clause "New" or "Revised" License | 5 votes |
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model.Token.TokenSeq
import com.monsanto.arch.cloudformation.model._
import spray.json.DefaultJsonProtocol._
import spray.json.{ DeserializationException, JsString, JsValue, JsonFormat, RootJsonFormat }

/** Repository events a CodeCommit trigger can fire on. */
sealed trait CodeCommitEvent extends Product with Serializable

object CodeCommitEvent {
  private type T = CodeCommitEvent

  case object all extends T
  case object updateReference extends T
  case object createReference extends T
  case object deleteReference extends T

  // Lookup table from the wire name of each event to its singleton instance.
  private val byName: Map[String, T] =
    Seq(all, updateReference, createReference, deleteReference).map(e => e.toString -> e).toMap

  // Events serialize as their bare name string, mirroring the CloudFormation schema.
  implicit lazy val format: JsonFormat[T] = new JsonFormat[T] {
    def write(t: T): JsValue = JsString(t.toString)

    def read(json: JsValue): T = json match {
      case JsString(name) if byName.contains(name) => byName(name)
      case _ => throw DeserializationException(s"Can't parse as CodeCommitEvent: $json")
    }
  }
}

/** A trigger definition attached to a CodeCommit repository. */
case class CodeCommitTrigger(
  Branches: Option[TokenSeq[String]] = None,
  CustomData: Option[String] = None,
  DestinationArn: Option[Token[String]] = None,
  Events: Option[Seq[CodeCommitEvent]] = None,
  Name: String
)

object CodeCommitTrigger {
  private type T = CodeCommitTrigger
  // field order here must match the case-class declaration for jsonFormat5
  implicit lazy val format: JsonFormat[T] = jsonFormat5(apply)
}

/** The AWS::CodeCommit::Repository CloudFormation resource. */
case class `AWS::CodeCommit::Repository`(
  name: String,
  RepositoryName: String,
  RepositoryDescription: Option[String] = None,
  Triggers: Option[Seq[CodeCommitTrigger]] = None,
  override val Condition: Option[ConditionRef] = None,
  override val DependsOn: Option[Seq[String]] = None
) extends Resource[`AWS::CodeCommit::Repository`] {
  override def when(newCondition: Option[ConditionRef]): `AWS::CodeCommit::Repository` =
    copy(Condition = newCondition)
}

object `AWS::CodeCommit::Repository` {
  implicit lazy val format: RootJsonFormat[`AWS::CodeCommit::Repository`] = jsonFormat6(apply)
}
Example 50
Source File: EmrSpec.scala From cloudformation-template-generator with BSD 3-Clause "New" or "Revised" License | 5 votes |
package com.monsanto.arch.cloudformation.model.resource

import org.scalatest.{FunSpec, Matchers}
import spray.json.{JsArray, JsObject, JsString, JsonWriter}

/** Serialization specs for the (recursively defined) EMR ClusterConfiguration. */
class EmrSpec extends FunSpec with Matchers {
  describe("ClusterConfiguration") {
    it("should write non recursive") {
      // Configurations = None must be omitted from the output entirely
      val clusterConfiguration = ClusterConfiguration(
        Classification = Some("hello"),
        ConfigurationProperties = Some(Map("hello" -> "world")),
        Configurations = None
      )
      val json = implicitly[JsonWriter[ClusterConfiguration]].write(clusterConfiguration)
      json should equal(JsObject(Map(
        "Classification" -> JsString("hello"),
        "ConfigurationProperties" -> JsObject(
          "hello" -> JsString("world")
        )
      )))
    }
    it("should write and read recursive") {
      // a configuration nesting another configuration exercises the recursive writer
      val clusterConfiguration = ClusterConfiguration(
        Classification = Some("hello"),
        ConfigurationProperties = Some(Map("hello" -> "world")),
        Configurations = Some(Seq(
          ClusterConfiguration(
            Classification = Some("hello1"),
            ConfigurationProperties = Some(Map("hello2" -> "world3")),
            Configurations = None
          )
        ))
      )
      val json = implicitly[JsonWriter[ClusterConfiguration]].write(clusterConfiguration)
      json should equal(JsObject(Map(
        "Classification" -> JsString("hello"),
        "ConfigurationProperties" -> JsObject(
          "hello" -> JsString("world")
        ),
        "Configurations" -> JsArray(
          JsObject(Map(
            "Classification" -> JsString("hello1"),
            "ConfigurationProperties" -> JsObject(
              "hello2" -> JsString("world3")
            )
          ))
        )
      )))
    }
  }
}
Example 51
Source File: IAMRole_UT.scala From cloudformation-template-generator with BSD 3-Clause "New" or "Revised" License | 5 votes |
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model.ResourceRef
import org.scalatest.{FunSpec, Matchers}
import spray.json.{JsObject, JsString, _}

/** Serialization spec for the AWS::IAM::Role resource. */
class IAMRole_UT extends FunSpec with Matchers {
  describe("AWS::IAM::Role") {

    it("should handle both AWS Managed and Customer policies into valid json") {
      // A customer policy renders as a {"Ref": ...}, an AWS-managed policy as a bare ARN string.
      val customerPolicy = `AWS::IAM::ManagedPolicy`("customer-policy", PolicyDocument(Seq()))
      val awsPolicy = AWSManagedPolicy("AdministratorAccess")

      // minimal assume-role policy allowing the config service to assume the role
      val fakePolicyDoc = PolicyDocument(Seq(
        PolicyStatement(
          "Allow",
          Some(DefinedPrincipal(Map("Service" -> Seq("config.amazonaws.com")))),
          Seq("sts:AssumeRole")
        )
      ))

      val expectedJson = JsObject(
        "name" -> JsString("role"),
        "AssumeRolePolicyDocument" -> fakePolicyDoc.toJson,
        "ManagedPolicyArns" -> JsArray(
          JsObject("Ref" -> JsString("customer-policy")),
          JsString("arn:aws:iam::aws:policy/AdministratorAccess")
        )
      )

      val role = `AWS::IAM::Role`(
        "role",
        fakePolicyDoc,
        ManagedPolicyArns = Some(Seq(ResourceRef(customerPolicy), awsPolicy))
      )

      role.toJson should be(expectedJson)
    }
  }
}
Example 52
Source File: CloudWatchSpec.scala From cloudformation-template-generator with BSD 3-Clause "New" or "Revised" License | 5 votes |
package com.monsanto.arch.cloudformation.model.resource

import org.scalatest.{FunSpec, Matchers}
import spray.json.{JsonFormat, JsString}

/** JSON serialization checks for the CloudWatch alarm namespace type. */
class CloudWatchSpec extends FunSpec with Matchers {

  // Serializes a namespace through its implicit JsonFormat instance.
  private def toJson(ns: `AWS::CloudWatch::Alarm::Namespace`): spray.json.JsValue =
    implicitly[JsonFormat[`AWS::CloudWatch::Alarm::Namespace`]].write(ns)

  it("should format AWS/EC2") {
    // predefined namespace constant
    toJson(`AWS::CloudWatch::Alarm::Namespace`.`AWS/EC2`) should equal(JsString("AWS/EC2"))
  }

  it("should format custom namespace") {
    // explicitly constructed custom namespace
    toJson(`AWS::CloudWatch::Alarm::Namespace`("hello")) should equal(JsString("hello"))
  }

  it("should format implicit custom namespace") {
    // a plain string coerced via the implicit String => Namespace conversion
    toJson("hello": `AWS::CloudWatch::Alarm::Namespace`) should equal(JsString("hello"))
  }
}
Example 53
Source File: Subnet_Parameter_List_UT.scala From cloudformation-template-generator with BSD 3-Clause "New" or "Revised" License | 5 votes |
package com.monsanto.arch.cloudformation.model.resource

import com.monsanto.arch.cloudformation.model._
import org.scalatest.{FunSpec, Matchers}
import spray.json.{JsNumber, JsString, _}

// Spec covering serialization of the List<AWS::EC2::Subnet::Id> template parameter,
// both as a template parameter and as a stack input parameter.
class Subnet_Parameter_List_UT extends FunSpec with Matchers {
  describe("AWS::EC2::Subnet_Parameter_List") {

    val description = "Select subnets where the RDS instances should be created"

    it("should serialize into valid json") {
      val param = `AWS::EC2::Subnet_Parameter_List`("subnets", description)
      val expected = JsObject(
        "subnets" -> JsObject(
          "Description" -> JsString(description),
          "Type"        -> JsString("List<AWS::EC2::Subnet::Id>")
        )
      )
      Seq[Parameter](param).toJson should be(expected)
    }

    it("should serialize into valid json as InputParameter") {
      val param = `AWS::EC2::Subnet_Parameter_List`("subnets", description)
      // Input parameters carry an empty value until the user supplies one.
      val expected = JsObject(
        "ParameterKey"   -> JsString("subnets"),
        "ParameterValue" -> JsString("")
      )
      val inputParam = InputParameter.templateParameterToInputParameter(Some(Seq(param)))
      inputParam.get(0).toJson should be(expected)
    }

    it("can be passed as ParameterRef to AWS::RDS::DBSubnetGroup") {
      val param = `AWS::EC2::Subnet_Parameter_List`("subnets", description)
      val dbSubnetGroup = `AWS::RDS::DBSubnetGroup`(
        name = "dbSubnetGroup",
        DBSubnetGroupDescription = "DB subnet group",
        SubnetIds = ParameterRef(param)
      )
      val expected = JsObject(
        "dbSubnetGroup" -> JsObject(
          "Type" -> JsString("AWS::RDS::DBSubnetGroup"),
          "Properties" -> JsObject(
            "DBSubnetGroupDescription" -> JsString("DB subnet group"),
            "SubnetIds"                -> JsObject("Ref" -> JsString("subnets"))
          )
        )
      )
      Seq[Resource[_]](dbSubnetGroup).toJson should be(expected)
    }
  }
}
Example 54
Source File: MutationCallbackSchemaExecutor.scala From graphcool-framework with Apache License 2.0 | 5 votes |
package cool.graph.deprecated.actions

import com.typesafe.scalalogging.LazyLogging
import cool.graph.client.ClientInjector
import cool.graph.client.database.{DeferredResolverProvider, SimpleManyModelDeferredResolver, SimpleToManyDeferredResolver}
import cool.graph.cuid.Cuid.createCuid
import cool.graph.deprecated.actions.schemas.{ActionUserContext, MutationMetaData}
import cool.graph.shared.models.{Model, Project}
import cool.graph.shared.schema.JsonMarshalling._
import sangria.execution.Executor
import sangria.parser.QueryParser
import sangria.schema.Schema
import spray.json.{JsObject, JsString}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.{Failure, Success}

// Event handed to an action callback: a fresh CUID, the callback URL, and the
// GraphQL selection result as payload.
case class Event(id: String, url: String, payload: Option[JsObject])

// Executes the GraphQL fragment of a (deprecated) mutation-callback action
// against the given schema and wraps the execution result in an Event.
class MutationCallbackSchemaExecutor(project: Project,
                                     model: Model,
                                     schema: Schema[ActionUserContext, Unit],
                                     nodeId: String,
                                     fragment: String,
                                     url: String,
                                     mutationId: String)(implicit injector: ClientInjector)
    extends LazyLogging {
  def execute: Future[Event] = {
    implicit val inj = injector.toScaldi

    // Parse the user-supplied fragment; on success run it through sangria with
    // permission checks skipped, on parse failure surface the error as a JSON
    // object instead of failing the future.
    val dataFut = QueryParser.parse(fragment) match {
      case Success(queryAst) =>
        Executor.execute(
          schema,
          queryAst,
          deferredResolver = new DeferredResolverProvider(
            new SimpleToManyDeferredResolver,
            new SimpleManyModelDeferredResolver,
            skipPermissionCheck = true
          ),
          userContext = ActionUserContext(
            requestId = "",
            project = project,
            nodeId = nodeId,
            mutation = MutationMetaData(id = mutationId, _type = "Create"),
            log = (x: String) => logger.info(x)
          )
        )
      case Failure(error) =>
        Future.successful(JsObject("error" -> JsString(error.getMessage)))
    }

    dataFut
      .map {
        // NOTE(review): assumes a JsObject result always contains a "data"
        // member that is itself an object — a parse-error JsObject from above
        // would throw here; confirm upstream guarantees.
        case JsObject(dataMap) => Event(id = createCuid(), url = url, payload = Some(dataMap("data").asJsObject))
        case json              => sys.error(s"Must only receive JsObjects here. But got instead: ${json.compactPrint}")
      }
  }
}
Example 55
Source File: DevFunctionEnvironment.scala From graphcool-framework with Apache License 2.0 | 5 votes |
package cool.graph.shared.functions.dev

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import cool.graph.akkautil.http.SimpleHttpClient
import cool.graph.cuid.Cuid
import cool.graph.shared.functions._
import cool.graph.shared.models.Project
import spray.json.{JsArray, JsObject, JsString, _}

import scala.concurrent.Future
import scala.util.{Failure, Success, Try}

// FunctionEnvironment implementation for local development: functions are
// deployed to and invoked through a locally running functions runtime,
// reached over plain HTTP at endpoints taken from the environment.
case class DevFunctionEnvironment()(implicit system: ActorSystem, materializer: ActorMaterializer) extends FunctionEnvironment {
  import Conversions._
  import system.dispatcher

  private val httpClient = SimpleHttpClient()

  // Dev mode has no notion of separate deployment accounts.
  override def pickDeploymentAccount(): Option[String] = None

  // Endpoint used by the backend itself (server-to-server calls).
  val functionEndpointInternal: String =
    sys.env.getOrElse("FUNCTION_ENDPOINT_INTERNAL", sys.error("FUNCTION_ENDPOINT_INTERNAL env var required for dev function deployment.")).stripSuffix("/")

  // Endpoint reachable from outside (e.g. for uploads from a browser).
  val functionEndpointExternal: String =
    sys.env.getOrElse("FUNCTION_ENDPOINT_EXTERNAL", sys.error("FUNCTION_ENDPOINT_EXTERNAL env var required for dev function deployment.")).stripSuffix("/")

  override def getTemporaryUploadUrl(project: Project): String = {
    // A fresh CUID keys each upload so concurrent deploys do not collide.
    val deployId = Cuid.createCuid()
    s"$functionEndpointExternal/functions/files/${project.id}/$deployId"
  }

  // Deploys a function file to the local runtime. Any transport or decoding
  // failure is mapped to DeployFailure rather than a failed Future.
  override def deploy(project: Project, externalFile: ExternalFile, name: String): Future[DeployResponse] = {
    httpClient
      .postJson(s"$functionEndpointInternal/functions/deploy/${project.id}", DeploymentInput(externalFile.url, externalFile.devHandler, name))
      .map { response =>
        response.bodyAs[StatusResponse] match {
          case Success(status) =>
            if (status.success) {
              DeploySuccess()
            } else {
              DeployFailure(new Exception(status.error.getOrElse("")))
            }
          case Failure(e) => DeployFailure(e)
        }
      }
      .recover {
        case e: Throwable => DeployFailure(e)
      }
  }

  // Invokes a deployed function with the given event payload and packages the
  // runtime's stdout/stderr/error streams plus the parsed response into one
  // JSON document for the caller.
  override def invoke(project: Project, name: String, event: String): Future[InvokeResponse] = {
    httpClient
      .postJson(s"$functionEndpointInternal/functions/invoke/${project.id}", FunctionInvocation(name, event))
      .map { response =>
        response.bodyAs[FunctionInvocationResult] match {
          case Success(result) =>
            // The function's return value travels as a string; anything that
            // does not parse as JSON is replaced with an error object.
            val returnValue = Try { result.value.map(_.toString).getOrElse("").parseJson } match {
              case Success(parsedJson) => parsedJson
              case Failure(_) =>
                JsObject("error" -> JsString("Function did not return a valid response. Check your function code / logs."))
            }
            val output = JsObject(
              "logs" -> JsArray(
                JsObject("stdout" -> JsString(result.stdout.getOrElse(""))),
                JsObject("stderr" -> JsString(result.stderr.getOrElse(""))),
                JsObject("error" -> JsString(result.error.getOrElse("")))
              ),
              "response" -> returnValue
            ).compactPrint

            // Even on failure the full output document is carried in the exception.
            if (result.success) {
              InvokeSuccess(output)
            } else {
              InvokeFailure(new Exception(output))
            }
          case Failure(e) => InvokeFailure(e)
        }
      }
      .recover {
        case e: Throwable => InvokeFailure(e)
      }
  }
}
Example 56
Source File: Metrics.scala From graphcool-framework with Apache License 2.0 | 5 votes |
package cool.graph.client

import java.util.concurrent.TimeUnit

import akka.actor.Actor
import cool.graph.cuid.Cuid
import cool.graph.shared.externalServices.KinesisPublisher
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import spray.json.{JsArray, JsBoolean, JsNumber, JsObject, JsString}

import scala.collection.mutable
import scala.concurrent.duration.FiniteDuration
import scala.util.control.NonFatal

// Names of the feature-usage metrics reported downstream. The string value is
// the metric key as it appears in the published records.
object FeatureMetric extends Enumeration {
  type FeatureMetric = Value
  val Subscriptions           = Value("backend/api/subscriptions")
  val Filter                  = Value("backend/feature/filter")
  val NestedMutations         = Value("backend/feature/nested-mutation")
  val ApiSimple               = Value("backend/api/simple")
  val ApiRelay                = Value("backend/api/relay")
  val ApiFiles                = Value("backend/api/files")
  val ServersideSubscriptions = Value("backend/feature/sss")
  val RequestPipeline         = Value("backend/feature/rp")                 // add this!
  val PermissionQuery         = Value("backend/feature/permission-queries") // add this!
  val Authentication          = Value("backend/feature/authentication")
  val Algolia                 = Value("backend/feature/algolia")            // add this!
  val Auth0                   = Value("backend/feature/integration-auth0")
  val Digits                  = Value("backend/feature/integration-digits")
}

// One API request's feature-usage sample, as buffered by FeatureMetricActor.
case class ApiFeatureMetric(ip: String,
                            date: DateTime,
                            projectId: String,
                            clientId: String,
                            usedFeatures: List[String],
                            // Should be false when we can't determine. This is the case for subscriptions.
                            // Is always false for File api.
                            isFromConsole: Boolean)

// Buffers ApiFeatureMetric samples and periodically flushes them, aggregated
// per project, to Kinesis. `interval` is the flush period in seconds.
class FeatureMetricActor(
    metricsPublisher: KinesisPublisher,
    interval: Int
) extends Actor {
  import context.dispatcher

  // In-memory buffer; only touched from the actor's message loop, so no
  // extra synchronization is needed.
  val metrics = mutable.Buffer.empty[ApiFeatureMetric]
  val FLUSH   = "FLUSH"
  // Self-scheduled tick that triggers a flush every `interval` seconds.
  val tick = context.system.scheduler.schedule(
    initialDelay = FiniteDuration(interval, TimeUnit.SECONDS),
    interval = FiniteDuration(interval, TimeUnit.SECONDS),
    receiver = self,
    message = FLUSH
  )

  override def postStop() = tick.cancel()

  def receive = {
    case metric: ApiFeatureMetric => metrics += metric
    case FLUSH                    => flushMetrics()
  }

  // Aggregates the buffered samples per project, publishes one JSON record per
  // project, then clears the buffer. Publish failures are logged and swallowed:
  // metrics are best-effort and must never crash the actor.
  def flushMetrics() = {
    val byProject = metrics.groupBy(_.projectId) map {
      case (projectId, metrics) =>
        JsObject(
          "requestCount" -> JsNumber(metrics.length),
          "projectId"    -> JsString(projectId),
          // Cap at 10 distinct IPs to bound the record size.
          "usedIps"  -> JsArray(metrics.map(_.ip).distinct.take(10).toVector.map(JsString(_))),
          "features" -> JsArray(metrics.flatMap(_.usedFeatures).distinct.toVector.map(JsString(_))),
          "date"     -> JsString(metrics.head.date.toString(DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z").withZoneUTC())),
          "version"  -> JsString("1"),
          "justConsoleRequests" -> JsBoolean(metrics.forall(_.isFromConsole))
        )
    }
    byProject.foreach { json =>
      try {
        metricsPublisher.putRecord(json.toString, shardId = Cuid.createCuid())
      } catch {
        case NonFatal(e) => println(s"Putting kinesis FeatureMetric failed: ${e.getMessage} ${e.toString}")
      }
    }
    metrics.clear()
  }
}
Example 57
Source File: LambdaLogsSpec.scala From graphcool-framework with Apache License 2.0 | 5 votes |
package cool.graph.functions.lambda

import cool.graph.shared.functions.lambda.LambdaFunctionEnvironment
import org.scalatest.{FlatSpec, Matchers}
import spray.json.{JsObject, JsString}

// Spec for LambdaFunctionEnvironment.parseLambdaLogs: raw Lambda log output is
// turned into one JsObject per timestamped entry, with multi-line entries
// aggregated under their timestamp and START/END/REPORT noise dropped.
class LambdaLogsSpec extends FlatSpec with Matchers {
  "Logs parsing for lambda" should "return the correct aggregation of lines" in {
    // A typical invocation log: framework markers, single-line entries, one
    // multi-line entry, and a JSON error blob.
    val testString =
      """
        |START RequestId: fb6c1b70-afef-11e7-b988-db72e0053f77 Version: $LATEST
        |2017-10-13T08:24:50.856Z fb6c1b70-afef-11e7-b988-db72e0053f77 getting event {}
        |2017-10-13T08:24:50.856Z fb6c1b70-afef-11e7-b988-db72e0053f77 requiring event => {
        | return {
        | data: {
        | message: "msg"
        | }
        | }
        |}
        |2017-10-13T08:24:50.857Z fb6c1b70-afef-11e7-b988-db72e0053f77 {"errorMessage":"Cannot read property 'name' of undefined","errorType":"TypeError","stackTrace":["module.exports.event (/var/task/src/hello2.js:6:47)","executeFunction (/var/task/src/hello2-lambda.js:14:19)","exports.handle (/var/task/src/hello2-lambda.js:9:3)"]}
        |END RequestId: fb6c1b70-afef-11e7-b988-db72e0053f77
        |REPORT RequestId: fb6c1b70-afef-11e7-b988-db72e0053f77 Duration: 1.10 ms Billed Duration: 100 ms Memory Size: 128 MB Max Memory Used: 26 MB
      """.stripMargin

    // Entries whose payload is not itself prefixed with a timestamp.
    val testString2 =
      """
        |2017-10-23T10:05:04.839Z a426c566-b7d9-11e7-a701-7b78cbef51e9 20
        |2017-10-23T10:05:04.839Z a426c566-b7d9-11e7-a701-7b78cbef51e9 null
        |2017-10-23T10:05:04.839Z a426c566-b7d9-11e7-a701-7b78cbef51e9 { big: 'OBJECT' }
      """.stripMargin

    val logs = LambdaFunctionEnvironment.parseLambdaLogs(testString)
    logs should contain(JsObject("2017-10-13T08:24:50.856Z" -> JsString("getting event {}")))
    // The multi-line entry is joined with newlines under its timestamp.
    logs should contain(
      JsObject("2017-10-13T08:24:50.856Z" -> JsString("requiring event => {\n return {\n data: {\n message: \"msg\"\n }\n }\n}")))
    logs should contain(JsObject("2017-10-13T08:24:50.857Z" -> JsString(
      """{"errorMessage":"Cannot read property 'name' of undefined","errorType":"TypeError","stackTrace":["module.exports.event (/var/task/src/hello2.js:6:47)","executeFunction (/var/task/src/hello2-lambda.js:14:19)","exports.handle (/var/task/src/hello2-lambda.js:9:3)"]}""")))

    val logs2 = LambdaFunctionEnvironment.parseLambdaLogs(testString2)
    logs.length shouldEqual 3
    logs2.length shouldEqual 3
    logs2 should contain(JsObject("2017-10-23T10:05:04.839Z" -> JsString("20")))
    logs2 should contain(JsObject("2017-10-23T10:05:04.839Z" -> JsString("null")))
    logs2 should contain(JsObject("2017-10-23T10:05:04.839Z" -> JsString("{ big: 'OBJECT' }")))
  }
}
Example 58
Source File: CreateSeat.scala From graphcool-framework with Apache License 2.0 | 5 votes |
package cool.graph.system.mutactions.internal

import cool.graph.shared.errors.UserInputErrors.CollaboratorProjectWithNameAlreadyExists
import cool.graph._
import cool.graph.client.database.DataResolver
import cool.graph.shared.externalServices.SnsPublisher
import cool.graph.system.database.tables.{ProjectTable, RelayIdTable, SeatTable}
import cool.graph.shared.models._
import scaldi.{Injectable, Injector}
import slick.jdbc.MySQLProfile.api._
import slick.jdbc.MySQLProfile.backend.DatabaseDef
import slick.lifted.TableQuery
import spray.json.{JsObject, JsString}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.{Failure, Success, Try}

// Mutaction inserting a Seat (collaborator membership) row together with its
// RelayId row, notifying via SNS and verifying that the collaborator does not
// already own a project of the same name.
case class CreateSeat(client: Client,
                      project: Project,
                      seat: Seat,
                      internalDatabase: DatabaseDef,
                      ignoreDuplicateNameVerificationError: Boolean = false)(
    implicit inj: Injector)
    extends SystemSqlMutaction
    with Injectable {

  val seatSnsPublisher: SnsPublisher = inject[SnsPublisher](identified by "seatSnsPublisher")

  // Publish an ADD notification unless the seat belongs to the project owner.
  // NOTE(review): this runs as a constructor side effect, i.e. at mutaction
  // construction time rather than in execute — confirm this is intentional.
  if (!seat.clientId.contains(project.ownerId)) {
    seatSnsPublisher.putRecord(
      JsObject(
        "action"      -> JsString("ADD"),
        "projectId"   -> JsString(project.id),
        "projectName" -> JsString(project.name),
        "email"       -> JsString(seat.email),
        "status"      -> JsString(seat.status.toString),
        "byEmail"     -> JsString(client.email),
        "byName"      -> JsString(client.name)
      ).compactPrint)
  }

  // Builds (but does not run) the SQL statements: one Seat insert plus the
  // matching RelayId insert, sequenced into a single DBIO action.
  override def execute: Future[SystemSqlStatementResult[Any]] = {
    val seats    = TableQuery[SeatTable]
    val relayIds = TableQuery[RelayIdTable]

    Future.successful(
      SystemSqlStatementResult(
        sqlAction = DBIO
          .seq(
            seats += cool.graph.system.database.tables
              .Seat(id = seat.id, status = seat.status, email = seat.email, clientId = seat.clientId, projectId = project.id),
            relayIds += cool.graph.system.database.tables.RelayId(seat.id, "Seat")
          )
      ))
  }

  override def rollback = Some(DeleteSeat(client, project, seat, internalDatabase).execute)

  // Fails with CollaboratorProjectWithNameAlreadyExists when the collaborator
  // already has a project of the same name (unless explicitly ignored).
  override def verify(): Future[Try[MutactionVerificationSuccess]] = {
    seat.clientId match {
      case None => // pending collaborators do not have projects yet.
        Future.successful(Success(MutactionVerificationSuccess()))
      case Some(id) =>
        ignoreDuplicateNameVerificationError match {
          case true => Future.successful(Success(MutactionVerificationSuccess()))
          case false =>
            val projects = TableQuery[ProjectTable]
            internalDatabase
              .run(projects.filter(p => p.clientId === id && p.name === project.name).length.result)
              .map {
                case 0 => Success(MutactionVerificationSuccess())
                case _ => Failure(CollaboratorProjectWithNameAlreadyExists(name = project.name))
              }
        }
    }
  }
}
Example 59
Source File: Series.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.config

import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import com.github.nscala_time.time.Imports._
import org.joda.time.Duration
import spray.json.{JsObject, JsString, JsValue, _}

/**
  * A time series declaration: binds a name to a generator (referenced by name
  * or inlined) and a sampling frequency.
  */
case class Series[T](name: String, generator: Either[String, Generator[Any]], frequency: Duration) extends TimeToJson
{
   def toJson: JsValue = {
      // A Left is a reference serialized as a plain string, a Right is the
      // generator's own JSON representation.
      val generatorJson = generator.fold(ref => ref.toJson, inline => inline.toJson)

      new JsObject(Map(
         "name" -> name.toJson,
         "generator" -> generatorJson,
         "frequency" -> frequency.toJson
      ))
   }
}

object Series extends TimeToJson
{
   def apply[T](value: JsValue): Series[T] = {
      val fields = value.asJsObject.fields

      val generator = fields("generator") match {
         case JsString(ref) => Left(ref)
         case inline        => Right(GeneratorFormat.read(inline))
      }
      val frequency = fields("frequency").convertTo[Duration]
      val name = fields("name").convertTo[String]

      Series(name, generator, frequency)
   }
}
Example 60
Source File: Models.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.config

import be.cetic.tsimulus.generators._
import be.cetic.tsimulus.generators.binary._
import be.cetic.tsimulus.generators.composite._
import be.cetic.tsimulus.generators.missing.{DefaultGenerator, LimitedGenerator, PartialGenerator, UndefinedGenerator}
import be.cetic.tsimulus.generators.primary._
import be.cetic.tsimulus.generators.dt._
import org.joda.time.LocalDateTime
import spray.json.{JsString, _}

object Model
{
   // Resolves an Either-encoded generator reference: a Left(name) is looked up
   // in the supplied registry, a Right already holds the generator itself.
   def generator(generators: String => Generator[Any])(element: Either[String, Generator[Any]]): Generator[Any] =
      element match {
         case Left(s) => generators(s)
         case Right(g) => g
      }
}

// Parameters of an ARMA model: AR coefficients (phi), MA coefficients (theta),
// standard deviation, constant term, and optional RNG seed.
case class ARMAModel(phi: Option[Seq[Double]], theta: Option[Seq[Double]], std: Double, c: Double, seed: Option[Long])

// JSON (de)serialization entry point for all generator types. Deserialization
// dispatches on the mandatory "type" discriminator field; serialization
// delegates to each generator's own toJson.
object GeneratorFormat extends JsonFormat[Generator[Any]]
{
   def deserializationError(s: String): Generator[Any] = throw DeserializationException(s)

   def serializationError(s: String): JsValue = throw new SerializationException(s)

   override def read(json: JsValue): Generator[Any] = json match {
      case known:JsObject if known.fields.contains("type") =>
         // Each case delegates to the matching generator's companion apply(JsObject).
         known.fields("type") match{
            case JsString("arma") => ARMAGenerator(known)
            case JsString("daily") => DailyGenerator(known)
            case JsString("weekly") => WeeklyGenerator(known)
            case JsString("monthly") => MonthlyGenerator(known)
            case JsString("yearly") => YearlyGenerator(known)
            case JsString("constant") => ConstantGenerator(known)
            case JsString("aggregate") => AggregateGenerator(known)
            case JsString("divide") => DivideGenerator(known)
            case JsString("correlated") => CorrelatedGenerator(known)
            case JsString("logistic") => LogisticGenerator(known)
            case JsString("conditional") => ConditionalGenerator(known)
            case JsString("true") => TrueGenerator(known)
            case JsString("false") => FalseGenerator(known)
            case JsString("transition") => TransitionGenerator(known)
            case JsString("binary-transition") => BinaryTransitionGenerator(known)
            case JsString("window") => SlidingWindowGenerator(known)
            case JsString("limited") => LimitedGenerator(known)
            case JsString("partial") => PartialGenerator(known)
            case JsString("time-shift") => TimeShiftGenerator(known)
            case JsString("function") => FunctionGenerator(known)
            case JsString("and") => AndGenerator(known)
            case JsString("or") => OrGenerator(known)
            case JsString("not") => NotGenerator(known)
            case JsString("xor") => XorGenerator(known)
            case JsString("implies") => ImpliesGenerator(known)
            case JsString("equiv") => EquivGenerator(known)
            case JsString("undefined") => UndefinedGenerator(known)
            case JsString("first-of") => DefaultGenerator(known)
            case JsString("greater-than") => GreaterThanGenerator(known)
            case JsString("lesser-than") => LesserThanGenerator(known)
            case JsString("gaussian") => GaussianNoiseGenerator(known)
            case JsString("year") => YearGenerator(known)
            case JsString("month") => MonthGenerator(known)
            case JsString("dom") => DayOfMonthGenerator(known)
            case JsString("hour") => HourGenerator(known)
            case JsString("minute") => MinuteGenerator(known)
            case JsString("second") => SecondTimeGenerator(known)
            case JsString("ms") => MillisecondTimeGenerator(known)
            case JsString("week") => WeekGenerator(known)
            case JsString("dow") => DayOfWeekGenerator(known)
            case JsString("doy") => DayOfYearGenerator(known)
            case JsString("now") => NowGenerator(known)
            case JsString("dt::diff") => DateTimeDifferenceGenerator(known)
            case JsString("sinus") => SinusGenerator(known)
            // Unknown discriminator value.
            case unknown => deserializationError(s"unknown Generator object: $unknown")
         }
      // Not an object, or missing the "type" field.
      case unknown => deserializationError(s"unknown Generator object: $unknown")
   }

   override def write(obj: Generator[Any]): JsValue = obj.toJson
}
Example 61
Source File: MonthGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.MonthTimeSeries
import org.joda.time.LocalDateTime
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}

/**
  * A generator extracting the month component of an underlying datetime time series.
  *
  * @param name an optional name identifying this generator
  * @param base the datetime generator to derive from, referenced by name or inlined
  */
class MonthGenerator(name: Option[String], val base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "month")
{
   override def timeseries(generators: String => Generator[Any]) = {
      // Resolve the base (by name or inline) and wrap its datetime series.
      val dtSeries = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new MonthTimeSeries(dtSeries)
   }

   override def toString = "MonthGenerator()"

   override def equals(o: Any) = o match {
      case other: MonthGenerator => other.name == name && other.base == base
      case _ => false
   }

   override def toJson: JsValue = {
      val core = Map("type" -> `type`.toJson, "base" -> either2json(base))
      // Only include "name" when one was provided.
      new JsObject(name.fold(core)(n => core + ("name" -> n.toJson)))
   }
}

object MonthGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new MonthGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new MonthGenerator(name, Right(base))

   def apply(json: JsValue): MonthGenerator = {
      val fields = json.asJsObject.fields
      // A non-string "name" raises a MatchError, as in the original.
      val name = fields.get("name").map { case JsString(x) => x }
      val base = fields("base") match {
         case JsString(ref) => Left(ref)
         case inline        => Right(GeneratorFormat.read(inline).asInstanceOf[Generator[LocalDateTime]])
      }
      new MonthGenerator(name, base)
   }
}
Example 62
Source File: HourGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.{DayOfYearTimeSeries, HourTimeSeries}
import org.joda.time.LocalDateTime
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}

/**
  * A generator extracting the hour-of-day component of an underlying datetime time series.
  *
  * @param name an optional name identifying this generator
  * @param base the datetime generator to derive from, referenced by name or inlined
  */
class HourGenerator(name: Option[String], val base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "hour")
{
   override def timeseries(generators: String => Generator[Any]) =
   {
      // Resolve the base (by name or inline) and wrap its datetime series.
      val ts = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new HourTimeSeries(ts)
   }

   override def toString = "HourGenerator()"

   // BUG FIX: the original equals compared only the names, so two HourGenerators
   // with different base generators were considered equal. Compare the base too,
   // consistently with the other dt generators (Month, Week, Year, Second, ...).
   // `base` is promoted to a val so it is accessible on the other instance;
   // this is a backward-compatible interface addition.
   override def equals(o: Any) = o match {
      case that: HourGenerator => (that.name == this.name) && (that.base == this.base)
      case _ => false
   }

   override def toJson: JsValue = {
      val t = Map(
         "type" -> `type`.toJson,
         "base" -> either2json(base)
      )
      // Only include "name" when one was provided.
      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object HourGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new HourGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new HourGenerator(name, Right(base))

   def apply(json: JsValue): HourGenerator = {
      val fields = json.asJsObject.fields

      // A non-string "name" raises a MatchError, as elsewhere in this package.
      val name = fields.get("name")
         .map(f => f match {
            case JsString(x) => x
         })

      // "base" is either a reference to another generator (string) or an inline definition.
      val base = fields("base") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
      }

      new HourGenerator(name, base)
   }
}
Example 63
Source File: DateTimeDifferenceGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.DateTimeDifferenceTimeSeries
import org.joda.time.{Duration, LocalDateTime}
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}

/**
  * A generator producing the duration between two underlying datetime time series.
  *
  * @param name an optional name identifying this generator
  * @param a the first datetime generator, referenced by name or inlined
  * @param b the second datetime generator, referenced by name or inlined
  */
class DateTimeDifferenceGenerator(name: Option[String], val a: Either[String, Generator[LocalDateTime]], val b: Either[String, Generator[LocalDateTime]]) extends Generator[Duration](name, "dt::diff")
{
   override def timeseries(generators: String => Generator[Any]) =
   {
      // Resolve both operands (by name or inline) before combining them.
      val aTS = Model.generator(generators)(a).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      val bTS = Model.generator(generators)(b).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new DateTimeDifferenceTimeSeries(aTS, bTS)
   }

   override def toString = s"DateTimeDifferenceGenerator(${a}, ${b})"

   override def equals(o: Any) = o match {
      case that: DateTimeDifferenceGenerator => that.name == this.name && this.a == that.a && this.b == that.b
      case _ => false
   }

   override def toJson: JsValue = {
      val t = Map(
         "type" -> `type`.toJson,
         "a" -> either2json(a),
         "b" -> either2json(b)
      )
      // Only include "name" when one was provided.
      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object DateTimeDifferenceGenerator extends DefaultJsonProtocol with TimeToJson
{
   // Convenience constructors for every reference/inline combination.
   def apply(name: Option[String], a: String, b: String) = new DateTimeDifferenceGenerator(name, Left(a), Left(b))
   def apply(name: Option[String], a: String, b: Generator[LocalDateTime]) = new DateTimeDifferenceGenerator(name, Left(a), Right(b))
   def apply(name: Option[String], a: Generator[LocalDateTime], b: String) = new DateTimeDifferenceGenerator(name, Right(a), Left(b))
   def apply(name: Option[String], a: Generator[LocalDateTime], b: Generator[LocalDateTime]) = new DateTimeDifferenceGenerator(name, Right(a), Right(b))

   def apply(json: JsValue): DateTimeDifferenceGenerator = {
      val fields = json.asJsObject.fields

      // A non-string "name" raises a MatchError, as elsewhere in this package.
      val name = fields.get("name")
         .map(f => f match {
            case JsString(x) => x
         })

      // Each operand is either a reference (string) or an inline generator definition.
      val a = fields("a") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
      }

      val b = fields("b") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
      }

      new DateTimeDifferenceGenerator(name, a, b)
   }
}
Example 64
Source File: SecondTimeGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.{MinuteTimeSeries, SecondTimeSeries}
import org.joda.time.LocalDateTime
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}

/**
  * A generator extracting the seconds component of an underlying datetime time series.
  *
  * @param name an optional name identifying this generator
  * @param base the datetime generator to derive from, referenced by name or inlined
  */
class SecondTimeGenerator(name: Option[String], val base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "second")
{
   override def timeseries(generators: String => Generator[Any]) = {
      val resolved = Model.generator(generators)(base)
      val dtSeries = resolved.timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new SecondTimeSeries(dtSeries)
   }

   override def toString = "SecondTimeGenerator()"

   override def equals(o: Any) = o match {
      case other: SecondTimeGenerator => other.name == name && other.base == base
      case _ => false
   }

   override def toJson: JsValue = {
      val core = Map("type" -> `type`.toJson, "base" -> either2json(base))
      // Only include "name" when one was provided.
      new JsObject(name.fold(core)(n => core + ("name" -> n.toJson)))
   }
}

object SecondTimeGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new SecondTimeGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new SecondTimeGenerator(name, Right(base))

   def apply(json: JsValue): SecondTimeGenerator = {
      val fields = json.asJsObject.fields
      // A non-string "name" raises a MatchError, as in the original.
      val name = fields.get("name").map { case JsString(x) => x }
      val base = fields("base") match {
         case JsString(ref) => Left(ref)
         case inline        => Right(GeneratorFormat.read(inline).asInstanceOf[Generator[LocalDateTime]])
      }
      new SecondTimeGenerator(name, base)
   }
}
Example 65
Source File: DayOfWeekGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.DayOfWeekTimeSeries
import org.joda.time.LocalDateTime
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}

/**
  * A generator extracting the day-of-week component of an underlying datetime time series.
  *
  * @param name an optional name identifying this generator
  * @param base the datetime generator to derive from, referenced by name or inlined
  */
class DayOfWeekGenerator(name: Option[String], val base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "dow")
{
   override def timeseries(generators: String => Generator[Any]) =
   {
      // Resolve the base (by name or inline) and wrap its datetime series.
      val ts = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new DayOfWeekTimeSeries(ts)
   }

   override def toString = "DayOfWeekGenerator()"

   // BUG FIX: the original equals compared only the names, so two
   // DayOfWeekGenerators with different base generators were considered equal.
   // Compare the base too, consistently with the other dt generators (Month,
   // Week, Year, Second, DayOfMonth). `base` is promoted to a val so it is
   // accessible on the other instance; a backward-compatible addition.
   override def equals(o: Any) = o match {
      case that: DayOfWeekGenerator => (that.name == this.name) && (that.base == this.base)
      case _ => false
   }

   override def toJson: JsValue = {
      val t = Map(
         "type" -> `type`.toJson,
         "base" -> either2json(base)
      )
      // Only include "name" when one was provided.
      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object DayOfWeekGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new DayOfWeekGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new DayOfWeekGenerator(name, Right(base))

   def apply(json: JsValue): DayOfWeekGenerator = {
      val fields = json.asJsObject.fields

      // A non-string "name" raises a MatchError, as elsewhere in this package.
      val name = fields.get("name")
         .map(f => f match {
            case JsString(x) => x
         })

      // "base" is either a reference to another generator (string) or an inline definition.
      val base = fields("base") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
      }

      new DayOfWeekGenerator(name, base)
   }
}
Example 66
Source File: DayOfMonthGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.composite.TimeShiftTimeSeries
import be.cetic.tsimulus.timeseries.dt.{DayOfMonthTimeSeries, MonthTimeSeries}
import org.joda.time.LocalDateTime
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}

/**
  * A generator extracting the day-of-month component of an underlying datetime time series.
  *
  * @param name an optional name identifying this generator
  * @param base the datetime generator to derive from, referenced by name or inlined
  */
class DayOfMonthGenerator(name: Option[String], val base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "dom")
{
   override def timeseries(generators: String => Generator[Any]) = {
      val dtSeries = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new DayOfMonthTimeSeries(dtSeries)
   }

   override def toString = s"DayOfMonthGenerator(${base})"

   override def equals(o: Any) = o match {
      case other: DayOfMonthGenerator => other.name == name && base == other.base
      case _ => false
   }

   override def toJson: JsValue = {
      val core = Map("type" -> `type`.toJson, "base" -> either2json(base))
      // Only include "name" when one was provided.
      new JsObject(name.fold(core)(n => core + ("name" -> n.toJson)))
   }
}

object DayOfMonthGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new DayOfMonthGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new DayOfMonthGenerator(name, Right(base))

   def apply(json: JsValue): DayOfMonthGenerator = {
      val fields = json.asJsObject.fields
      // A non-string "name" raises a MatchError, as in the original.
      val name = fields.get("name").map { case JsString(x) => x }
      val base = fields("base") match {
         case JsString(ref) => Left(ref)
         case inline        => Right(GeneratorFormat.read(inline).asInstanceOf[Generator[LocalDateTime]])
      }
      new DayOfMonthGenerator(name, base)
   }
}
Example 67
Source File: WeekGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.{MinuteTimeSeries, WeekTimeSeries}
import org.joda.time.LocalDateTime
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}

/**
 * A generator producing, for each datetime of a base time series, the corresponding week number.
 *
 * @param name an optional name identifying this generator in the model
 * @param base either the name of the base datetime generator, or the generator itself
 */
class WeekGenerator(name: Option[String], val base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "week")
{
   override def timeseries(generators: String => Generator[Any]) =
   {
      // NOTE(review): unchecked cast (erasure) — assumes the base generator yields LocalDateTime values.
      val baseSeries = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new WeekTimeSeries(baseSeries)
   }

   override def toString = "WeekGenerator()"

   override def equals(o: Any) = o match {
      case that: WeekGenerator => (that.name == this.name) && (that.base == this.base)
      case _ => false
   }

   override def toJson: JsValue =
   {
      val mandatory = Map(
         "type" -> `type`.toJson,
         "base" -> either2json(base)
      )

      // Serialize the name only when one was given.
      new JsObject(name.fold(mandatory)(n => mandatory + ("name" -> n.toJson)))
   }
}

object WeekGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new WeekGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new WeekGenerator(name, Right(base))

   /** Builds a generator from its JSON representation. */
   def apply(json: JsValue): WeekGenerator =
   {
      val fields = json.asJsObject.fields

      val name = fields.get("name").map { case JsString(x) => x }

      // "base" is either a reference to another generator (by name) or an inline declaration.
      val base = fields("base") match {
         case JsString(s) => Left(s)
         case other => Right(GeneratorFormat.read(other).asInstanceOf[Generator[LocalDateTime]])
      }

      new WeekGenerator(name, base)
   }
}
Example 68
Source File: YearGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.YearTimeSeries
import org.joda.time.{Duration, LocalDateTime}
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue}
import spray.json._

/**
 * A generator producing, for each datetime of a base time series, the corresponding year.
 *
 * @param name an optional name identifying this generator in the model
 * @param base either the name of the base datetime generator, or the generator itself
 */
class YearGenerator(name: Option[String], val base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "year")
{
   override def timeseries(generators: String => Generator[Any]) =
   {
      // NOTE(review): unchecked cast (erasure) — assumes the base generator yields LocalDateTime values.
      val baseSeries = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new YearTimeSeries(baseSeries)
   }

   override def toString = "YearGenerator()"

   override def equals(o: Any) = o match {
      case that: YearGenerator => (that.name == this.name) && (that.base == this.base)
      case _ => false
   }

   override def toJson: JsValue =
   {
      val mandatory = Map(
         "type" -> `type`.toJson,
         "base" -> either2json(base)
      )

      // Serialize the name only when one was given.
      new JsObject(name.fold(mandatory)(n => mandatory + ("name" -> n.toJson)))
   }
}

object YearGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new YearGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new YearGenerator(name, Right(base))

   /** Builds a generator from its JSON representation. */
   def apply(json: JsValue): YearGenerator =
   {
      val fields = json.asJsObject.fields

      val name = fields.get("name").map { case JsString(x) => x }

      // "base" is either a reference to another generator (by name) or an inline declaration.
      val base = fields("base") match {
         case JsString(s) => Left(s)
         case other => Right(GeneratorFormat.read(other).asInstanceOf[Generator[LocalDateTime]])
      }

      new YearGenerator(name, base)
   }
}
Example 69
Source File: DayOfYearGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.{DayOfWeekTimeSeries, DayOfYearTimeSeries}
import org.joda.time.LocalDateTime
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}

/**
 * A generator producing, for each datetime of a base time series, the corresponding day of year.
 *
 * @param name an optional name identifying this generator in the model
 * @param base either the name of the base datetime generator, or the generator itself
 *             (promoted to a `val` so it takes part in equality, like the sibling generators)
 */
class DayOfYearGenerator(name: Option[String], val base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "doy")
{
   override def timeseries(generators: String => Generator[Any]) =
   {
      // NOTE(review): unchecked cast (erasure) — assumes the base generator yields LocalDateTime values.
      val ts = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new DayOfYearTimeSeries(ts)
   }

   override def toString = "DayOfYearGenerator()"

   // Fixed: the original compared names only, so two generators with different
   // bases were considered equal. Sibling generators (e.g. DayOfMonthGenerator)
   // compare the base as well.
   override def equals(o: Any) = o match {
      case that: DayOfYearGenerator => (that.name == this.name) && (that.base == this.base)
      case _ => false
   }

   override def toJson: JsValue =
   {
      val t = Map(
         "type" -> `type`.toJson,
         "base" -> either2json(base)
      )

      // The optional name is only serialized when present.
      new JsObject(
         name.map(n => t + ("name" -> n.toJson)).getOrElse(t)
      )
   }
}

object DayOfYearGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new DayOfYearGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new DayOfYearGenerator(name, Right(base))

   /** Builds a generator from its JSON representation; a non-string "name" raises a MatchError, as elsewhere. */
   def apply(json: JsValue): DayOfYearGenerator =
   {
      val fields = json.asJsObject.fields

      val name = fields.get("name").map { case JsString(x) => x }

      // "base" is either a reference to another generator (by name) or an inline declaration.
      val base = fields("base") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g).asInstanceOf[Generator[LocalDateTime]])
      }

      new DayOfYearGenerator(name, base)
   }
}
Example 70
Source File: MillisecondTimeGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.MillisecondTimeSeries
import org.joda.time.LocalDateTime
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}

/**
 * A generator producing, for each datetime of a base time series, its millisecond component.
 *
 * @param name an optional name identifying this generator in the model
 * @param base either the name of the base datetime generator, or the generator itself
 */
class MillisecondTimeGenerator(name: Option[String], val base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "ms")
{
   override def timeseries(generators: String => Generator[Any]) =
   {
      // NOTE(review): unchecked cast (erasure) — assumes the base generator yields LocalDateTime values.
      val baseSeries = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new MillisecondTimeSeries(baseSeries)
   }

   override def toString = "MillisecondTimeGenerator()"

   override def equals(o: Any) = o match {
      case that: MillisecondTimeGenerator => (that.name == this.name) && (that.base == this.base)
      case _ => false
   }

   override def toJson: JsValue =
   {
      val mandatory = Map(
         "type" -> `type`.toJson,
         "base" -> either2json(base)
      )

      // Serialize the name only when one was given.
      new JsObject(name.fold(mandatory)(n => mandatory + ("name" -> n.toJson)))
   }
}

object MillisecondTimeGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new MillisecondTimeGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new MillisecondTimeGenerator(name, Right(base))

   /** Builds a generator from its JSON representation. */
   def apply(json: JsValue): MillisecondTimeGenerator =
   {
      val fields = json.asJsObject.fields

      val name = fields.get("name").map { case JsString(x) => x }

      // "base" is either a reference to another generator (by name) or an inline declaration.
      val base = fields("base") match {
         case JsString(s) => Left(s)
         case other => Right(GeneratorFormat.read(other).asInstanceOf[Generator[LocalDateTime]])
      }

      new MillisecondTimeGenerator(name, base)
   }
}
Example 71
Source File: MinuteGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.dt

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.dt.{MillisecondTimeSeries, MinuteTimeSeries}
import org.joda.time.LocalDateTime
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}

/**
 * A generator producing, for each datetime of a base time series, its minute component.
 *
 * @param name an optional name identifying this generator in the model
 * @param base either the name of the base datetime generator, or the generator itself
 */
class MinuteGenerator(name: Option[String], val base: Either[String, Generator[LocalDateTime]]) extends Generator[Int](name, "minute")
{
   override def timeseries(generators: String => Generator[Any]) =
   {
      // NOTE(review): unchecked cast (erasure) — assumes the base generator yields LocalDateTime values.
      val baseSeries = Model.generator(generators)(base).timeseries(generators).asInstanceOf[TimeSeries[LocalDateTime]]
      new MinuteTimeSeries(baseSeries)
   }

   override def toString = "MinuteGenerator()"

   override def equals(o: Any) = o match {
      case that: MinuteGenerator => (that.name == this.name) && (that.base == this.base)
      case _ => false
   }

   override def toJson: JsValue =
   {
      val mandatory = Map(
         "type" -> `type`.toJson,
         "base" -> either2json(base)
      )

      // Serialize the name only when one was given.
      new JsObject(name.fold(mandatory)(n => mandatory + ("name" -> n.toJson)))
   }
}

object MinuteGenerator extends DefaultJsonProtocol with TimeToJson
{
   def apply(name: Option[String], base: String) = new MinuteGenerator(name, Left(base))
   def apply(name: Option[String], base: Generator[LocalDateTime]) = new MinuteGenerator(name, Right(base))

   /** Builds a generator from its JSON representation. */
   def apply(json: JsValue): MinuteGenerator =
   {
      val fields = json.asJsObject.fields

      val name = fields.get("name").map { case JsString(x) => x }

      // "base" is either a reference to another generator (by name) or an inline declaration.
      val base = fields("base") match {
         case JsString(s) => Left(s)
         case other => Right(GeneratorFormat.read(other).asInstanceOf[Generator[LocalDateTime]])
      }

      new MinuteGenerator(name, base)
   }
}
Example 72
Source File: ConstantGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.primary

import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.primary.ConstantTimeSeries
import spray.json.{JsNumber, JsString, JsValue, _}

/**
 * A generator producing a time series with a single, constant value.
 *
 * @param name  an optional name identifying this generator in the model
 * @param value the constant value of the produced time series
 */
class ConstantGenerator(name: Option[String], val value: Double) extends Generator[Double](name, "constant")
{
   override def timeseries(generators: String => Generator[Any]) = ConstantTimeSeries(value)

   override def toString = s"Constant(${name}, ${value})"

   override def equals(o: Any) = o match {
      case that: ConstantGenerator => that.name == this.name && that.value == this.value
      case _ => false
   }

   override def toJson: JsValue =
   {
      val mandatory = Map(
         "type" -> `type`.toJson,
         "value" -> value.toJson
      )

      // Serialize the name only when one was given.
      new JsObject(name.fold(mandatory)(n => mandatory + ("name" -> n.toJson)))
   }
}

object ConstantGenerator
{
   /** Builds a generator from its JSON representation; "value" must be a JSON number. */
   def apply(json: JsValue): ConstantGenerator =
   {
      val fields = json.asJsObject.fields

      val name = fields.get("name").map { case JsString(x) => x }

      val value = fields("value") match {
         case JsNumber(n) => n.toDouble
      }

      new ConstantGenerator(name, value)
   }
}
Example 73
Source File: GaussianNoiseGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.primary

import be.cetic.tsimulus.config.ARMAModel
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.primary.GaussianNoiseTimeSeries
import com.github.nscala_time.time.Imports._
import org.joda.time.Duration
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue}
import spray.json._

import scala.util.Random

/**
 * A generator producing a Gaussian white-noise time series.
 *
 * @param name an optional name identifying this generator in the model
 * @param seed the seed of the underlying pseudo-random number generator
 * @param std  the standard deviation of the noise
 */
class GaussianNoiseGenerator(name: Option[String], val seed: Int, val std: Double) extends Generator[Double](name, "gaussian")
{
   override def timeseries(generators: String => Generator[Any]) = GaussianNoiseTimeSeries(seed, std)

   override def toString = s"GaussianNoise(${seed}, ${std})"

   // Standard deviations are compared with a small tolerance to absorb float noise.
   override def equals(o: Any) = o match {
      case that: GaussianNoiseGenerator => that.name == this.name &&
         that.seed == this.seed &&
         Math.abs(that.std - this.std) < 0.0001
      case _ => false
   }

   override def toJson: JsValue =
   {
      val mandatory = Map(
         "type" -> `type`.toJson,
         "seed" -> seed.toJson,
         "std" -> std.toJson
      )

      // Serialize the name only when one was given.
      new JsObject(name.fold(mandatory)(n => mandatory + ("name" -> n.toJson)))
   }
}

object GaussianNoiseGenerator extends DefaultJsonProtocol with TimeToJson
{
   /** Builds a generator from its JSON representation. */
   def apply(json: JsValue): GaussianNoiseGenerator =
   {
      val fields = json.asJsObject.fields

      val name = fields.get("name").map { case JsString(x) => x }
      val seed = fields("seed").convertTo[Int]
      val std = fields("std").convertTo[Double]

      new GaussianNoiseGenerator(name, seed, std)
   }
}
Example 74
Source File: WeeklyGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.primary

import java.security.InvalidParameterException

import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.primary.WeeklyTimeSeries
import org.joda.time.DateTimeConstants
import spray.json.{JsNumber, JsObject, JsString, JsValue, _}

/**
 * A generator producing a weekly pattern: one value per (English, lowercase) day name.
 *
 * @param name   an optional name identifying this generator in the model
 * @param points the value associated with each day name
 */
class WeeklyGenerator(name: Option[String], val points: Map[String, Double]) extends Generator[Double](name, "weekly")
{
   override def timeseries(generators: String => Generator[Any]) =
   {
      // Translate an English day name into the matching Joda-Time constant.
      def toDayConstant = (s: String) => s match {
         case "monday" => DateTimeConstants.MONDAY
         case "tuesday" => DateTimeConstants.TUESDAY
         case "wednesday" => DateTimeConstants.WEDNESDAY
         case "thursday" => DateTimeConstants.THURSDAY
         case "friday" => DateTimeConstants.FRIDAY
         case "saturday" => DateTimeConstants.SATURDAY
         case "sunday" => DateTimeConstants.SUNDAY
         case _ => throw new InvalidParameterException(s"'${s}' is not a valid day name.")
      }

      WeeklyTimeSeries(points.map { case (dayName, v) => (toDayConstant(dayName), v) })
   }

   override def toString = s"WeeklyGenerator(${name},${points})"

   override def equals(o: Any) = o match {
      case that: WeeklyGenerator => that.name == this.name && that.points == this.points
      case _ => false
   }

   override def toJson: JsValue =
   {
      val mandatory = Map(
         "type" -> `type`.toJson,
         "points" -> points.toJson
      )

      // Serialize the name only when one was given.
      new JsObject(name.fold(mandatory)(n => mandatory + ("name" -> n.toJson)))
   }
}

object WeeklyGenerator
{
   /**
    * Builds a generator from its JSON representation.
    * Rejects any key of "points" that is not a valid lowercase English day name.
    */
   def apply(value: JsValue): WeeklyGenerator =
   {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map { case JsString(x) => x }

      val rawPoints = value.asJsObject.fields("points") match {
         case JsObject(x) => x
         case _ => throw new ClassCastException
      }

      // Every point value must be a JSON number (MatchError otherwise, as in siblings).
      val parsedPoints = rawPoints.map { case (k, v) => (k, v match { case JsNumber(x) => x.toDouble }) }

      val validDayNames = List("monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday")
      val invalidDayNames = parsedPoints.keySet.filterNot(validDayNames contains _)

      if (!invalidDayNames.isEmpty)
         throw new InvalidParameterException("The following day names are not valid: " + invalidDayNames)

      new WeeklyGenerator(name, parsedPoints)
   }
}
Example 75
Source File: LesserThanGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.binary.LesserThanTimeSeries
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, RootJsonFormat, _}

/**
 * A generator producing a binary time series that is true when 'a' is lesser than 'b'.
 *
 * @param name   an optional name identifying this generator in the model
 * @param a      the left operand (generator name or inline generator)
 * @param b      the right operand (generator name or inline generator)
 * @param strict if defined, whether the comparison is strict; defaults to strict
 */
class LesserThanGenerator( name: Option[String],
                           val a: Either[String, Generator[Any]],
                           val b: Either[String, Generator[Any]],
                           val strict: Option[Boolean]) extends Generator[Any](name, "lesser-than")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      // NOTE(review): these matches are unchecked due to erasure — both operands are assumed numeric.
      val first = Model.generator(generators)(a).timeseries(generators) match {
         case t: TimeSeries[Double] => t
      }
      val second = Model.generator(generators)(b).timeseries(generators) match {
         case t: TimeSeries[Double] => t
      }

      // An absent "strict" flag means a strict comparison.
      new LesserThanTimeSeries(first, second, strict match {
         case None => true
         case Some(x) => x
      })
   }

   override def toString = s"LesserThan(${name}, ${a}, ${b}, ${strict})"

   override def equals(o: Any) = o match {
      case that: LesserThanGenerator => that.name == this.name &&
         that.a == this.a &&
         that.b == this.b &&
         that.strict == this.strict
      case _ => false
   }

   override def toJson: JsValue =
   {
      def operand2json(op: Either[String, Generator[Any]]) = op match {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      val mandatory = Map(
         "a" -> operand2json(a),
         "b" -> operand2json(b),
         "type" -> `type`.toJson
      )

      // Optional fields are serialized only when defined.
      val withName = if (name.isDefined) mandatory.updated("name", name.toJson) else mandatory
      val withStrict = if (strict.isDefined) withName.updated("strict", strict.toJson) else withName

      new JsObject(withStrict)
   }
}

object LesserThanGenerator extends DefaultJsonProtocol
{
   /** Builds a generator from its JSON representation. */
   def apply(value: JsValue): LesserThanGenerator =
   {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map(_.convertTo[String])
      val strict = fields.get("strict").map(_.convertTo[Boolean])

      val a = fields("a") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val b = fields("b") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      new LesserThanGenerator(name, a, b, strict)
   }
}
Example 76
Source File: ImpliesGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.binary.{AndTimeSeries, ImpliesTimeSeries}
import spray.json.{JsObject, JsString, JsValue, _}

/**
 * A generator producing a binary time series implementing logical implication: a => b.
 *
 * NOTE(review): the JSON type tag is "then", not "implies" — verify this matches
 * what GeneratorFormat dispatches on.
 *
 * @param name an optional name identifying this generator in the model
 * @param a    the antecedent (generator name or inline generator)
 * @param b    the consequent (generator name or inline generator)
 */
class ImpliesGenerator(name: Option[String],
                       val a: Either[String, Generator[Any]],
                       val b: Either[String, Generator[Any]]) extends Generator[Any](name, "then")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      // NOTE(review): unchecked matches due to erasure — both operands are assumed boolean.
      val first = Model.generator(generators)(a).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }
      val second = Model.generator(generators)(b).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      new ImpliesTimeSeries(first, second)
   }

   override def toString = s"Implies(${name}, ${a}, ${b})"

   override def equals(o: Any) = o match {
      case that: ImpliesGenerator => that.name == this.name &&
         that.a == this.a &&
         that.b == this.b
      case _ => false
   }

   override def toJson: JsValue =
   {
      def operand2json(op: Either[String, Generator[Any]]) = op match {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      val mandatory = Map(
         "a" -> operand2json(a),
         "b" -> operand2json(b),
         "type" -> `type`.toJson
      )

      // The optional name is serialized only when defined.
      val complete = if (name.isDefined) mandatory.updated("name", name.toJson) else mandatory

      new JsObject(complete)
   }
}

object ImpliesGenerator extends DefaultJsonProtocol
{
   /** Builds a generator from its JSON representation. */
   def apply(value: JsValue): ImpliesGenerator =
   {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map(_.convertTo[String])

      val a = fields("a") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val b = fields("b") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      new ImpliesGenerator(name, a, b)
   }
}
Example 77
Source File: XorGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.binary.XorTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}

/**
 * A generator producing a binary time series implementing exclusive or: a xor b.
 *
 * @param name an optional name identifying this generator in the model
 * @param a    the left operand (generator name or inline generator)
 * @param b    the right operand (generator name or inline generator)
 */
class XorGenerator(name: Option[String],
                   val a: Either[String, Generator[Any]],
                   val b: Either[String, Generator[Any]]) extends Generator[Any](name, "xor")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      // NOTE(review): unchecked matches due to erasure — both operands are assumed boolean.
      val first = Model.generator(generators)(a).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }
      val second = Model.generator(generators)(b).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      new XorTimeSeries(first, second)
   }

   override def toString = s"Xor(${name}, ${a}, ${b})"

   override def equals(o: Any) = o match {
      case that: XorGenerator => that.name == this.name &&
         that.a == this.a &&
         that.b == this.b
      case _ => false
   }

   override def toJson: JsValue =
   {
      def operand2json(op: Either[String, Generator[Any]]) = op match {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      val mandatory = Map(
         "a" -> operand2json(a),
         "b" -> operand2json(b),
         "type" -> `type`.toJson
      )

      // The optional name is serialized only when defined.
      val complete = if (name.isDefined) mandatory.updated("name", name.toJson) else mandatory

      new JsObject(complete)
   }
}

object XorGenerator extends DefaultJsonProtocol
{
   /** Builds a generator from its JSON representation. */
   def apply(value: JsValue): XorGenerator =
   {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map(_.convertTo[String])

      val a = fields("a") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val b = fields("b") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      new XorGenerator(name, a, b)
   }
}
Example 78
Source File: FalseGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.binary.FalseTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}

/**
 * A generator producing a binary time series that is always false.
 *
 * @param name an optional name identifying this generator in the model
 */
class FalseGenerator(name: Option[String]) extends Generator[Boolean](name, "false")
{
   override def timeseries(generators: (String) => Generator[Any]) = new FalseTimeSeries()

   override def toString = s"False(${name})"

   override def equals(o: Any) = o match {
      case that: FalseGenerator => that.name == this.name
      case _ => false
   }

   override def toJson: JsValue =
   {
      val mandatory = Map("type" -> `type`.toJson)

      // Serialize the name only when one was given.
      new JsObject(name.fold(mandatory)(n => mandatory + ("name" -> n.toJson)))
   }
}

object FalseGenerator
{
   /** Builds a generator from its JSON representation. */
   def apply(value: JsValue): FalseGenerator =
   {
      val name = value.asJsObject.fields.get("name").map { case JsString(x) => x }
      new FalseGenerator(name)
   }
}
Example 79
Source File: AndGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.binary.AndTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}

/**
 * A generator producing a binary time series implementing logical conjunction: a and b.
 *
 * @param name an optional name identifying this generator in the model
 * @param a    the left operand (generator name or inline generator)
 * @param b    the right operand (generator name or inline generator)
 */
class AndGenerator(name: Option[String],
                   val a: Either[String, Generator[Any]],
                   val b: Either[String, Generator[Any]]) extends Generator[Any](name, "and")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      // NOTE(review): unchecked matches due to erasure — both operands are assumed boolean.
      val first = Model.generator(generators)(a).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }
      val second = Model.generator(generators)(b).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      new AndTimeSeries(first, second)
   }

   override def toString = s"And(${name}, ${a}, ${b})"

   override def equals(o: Any) = o match {
      case that: AndGenerator => that.name == this.name &&
         that.a == this.a &&
         that.b == this.b
      case _ => false
   }

   override def toJson: JsValue =
   {
      def operand2json(op: Either[String, Generator[Any]]) = op match {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      val mandatory = Map(
         "a" -> operand2json(a),
         "b" -> operand2json(b),
         "type" -> `type`.toJson
      )

      // The optional name is serialized only when defined.
      val complete = if (name.isDefined) mandatory.updated("name", name.toJson) else mandatory

      new JsObject(complete)
   }
}

object AndGenerator extends DefaultJsonProtocol
{
   /** Builds a generator from its JSON representation. */
   def apply(value: JsValue): AndGenerator =
   {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map(_.convertTo[String])

      val a = fields("a") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val b = fields("b") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      new AndGenerator(name, a, b)
   }
}
Example 80
Source File: LogisticGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.binary.LogisticTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}

import scala.util.Random

/**
 * A generator producing a binary time series by applying a logistic transformation
 * to a numeric base time series.
 *
 * @param name      an optional name identifying this generator in the model
 * @param generator the numeric base (generator name or inline generator)
 * @param location  the location parameter of the logistic distribution
 * @param scale     the scale parameter of the logistic distribution
 * @param seed      an optional RNG seed; a random one is drawn when absent
 */
class LogisticGenerator(name: Option[String],
                        val generator: Either[String, Generator[Any]],
                        val location: Double,
                        val scale: Double,
                        val seed: Option[Int]) extends Generator[Boolean](name, "logistic")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      // NOTE(review): unchecked match due to erasure — the base is assumed to produce doubles.
      Model.generator(generators)(generator).timeseries(generators) match {
         case dTS: TimeSeries[Double] => LogisticTimeSeries(dTS, location, scale, seed.getOrElse(Random.nextInt()))
         case other => throw new ClassCastException(other.toString)
      }
   }

   override def toString = s"Logistic(${name}, ${generator}, ${location}, ${scale}, ${seed})"

   override def equals(o: Any) = o match {
      case that: LogisticGenerator => that.name == this.name &&
         that.generator == this.generator &&
         that.location == this.location &&
         that.scale == this.scale &&
         that.seed == this.seed
      case _ => false
   }

   override def toJson: JsValue =
   {
      val generatorJson = (generator match {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }).toJson

      val mandatory = Map(
         "type" -> `type`.toJson,
         "generator" -> generatorJson,
         "location" -> location.toJson,
         "scale" -> scale.toJson,
         "seed" -> seed.toJson
      )

      // Serialize the name only when one was given.
      new JsObject(name.fold(mandatory)(n => mandatory + ("name" -> n.toJson)))
   }
}

object LogisticGenerator extends TimeToJson
{
   /** Builds a generator from its JSON representation. */
   def apply(value: JsValue): LogisticGenerator =
   {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map { case JsString(x) => x }

      val generator = fields("generator") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val location = fields("location").convertTo[Double]
      val scale = fields("scale").convertTo[Double]
      val seed = fields.get("seed").map(_.convertTo[Int])

      new LogisticGenerator(name, generator, location, scale, seed)
   }
}
Example 81
Source File: NotGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.binary.NotTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}

/**
 * A generator producing a binary time series implementing logical negation of its base.
 *
 * @param name      an optional name identifying this generator in the model
 * @param generator the boolean base (generator name or inline generator)
 */
// Fixed: the type tag was "or", which made toJson emit {"type": "or", ...} —
// a serialize/deserialize round-trip would then dispatch to OrGenerator
// (and fail, since "generator" != "a"/"b"). The tag for this class is "not".
class NotGenerator(name: Option[String],
                   val generator: Either[String, Generator[Any]]) extends Generator[Any](name, "not")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      // NOTE(review): unchecked match due to erasure — the base is assumed boolean.
      val base = Model.generator(generators)(generator).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      NotTimeSeries(base)
   }

   override def toString = "Not(" + name + ", " + generator + ")"

   override def equals(o: Any) = o match {
      case that: NotGenerator => that.name == this.name && that.generator == this.generator
      case _ => false
   }

   override def toJson: JsValue =
   {
      val _generator = generator match {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      var t = Map(
         "generator" -> _generator,
         "type" -> `type`.toJson
      )

      // The optional name is serialized only when defined.
      if (name.isDefined) t = t.updated("name", name.toJson)

      new JsObject(t)
   }
}

object NotGenerator extends DefaultJsonProtocol
{
   /** Builds a generator from its JSON representation. */
   def apply(value: JsValue): NotGenerator =
   {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map(_.convertTo[String])

      val generator = fields("generator") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      new NotGenerator(name, generator)
   }
}
Example 82
Source File: EquivGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.binary.{AndTimeSeries, EquivTimeSeries}
import spray.json.{JsObject, JsString, JsValue, _}

/**
 * A generator producing a binary time series implementing logical equivalence: a <=> b.
 *
 * NOTE(review): the type tag "Equiv" is capitalized, unlike the sibling tags
 * ("and", "or", "xor") — verify this matches what GeneratorFormat dispatches on.
 *
 * @param name an optional name identifying this generator in the model
 * @param a    the left operand (generator name or inline generator)
 * @param b    the right operand (generator name or inline generator)
 */
class EquivGenerator(name: Option[String],
                     val a: Either[String, Generator[Any]],
                     val b: Either[String, Generator[Any]]) extends Generator[Any](name, "Equiv")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      // NOTE(review): unchecked matches due to erasure — both operands are assumed boolean.
      val first = Model.generator(generators)(a).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }
      val second = Model.generator(generators)(b).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      new EquivTimeSeries(first, second)
   }

   override def toString = s"Equiv(${name}, ${a}, ${b})"

   override def equals(o: Any) = o match {
      case that: EquivGenerator => that.name == this.name &&
         that.a == this.a &&
         that.b == this.b
      case _ => false
   }

   override def toJson: JsValue =
   {
      def operand2json(op: Either[String, Generator[Any]]) = op match {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      val mandatory = Map(
         "a" -> operand2json(a),
         "b" -> operand2json(b),
         "type" -> `type`.toJson
      )

      // The optional name is serialized only when defined.
      val complete = if (name.isDefined) mandatory.updated("name", name.toJson) else mandatory

      new JsObject(complete)
   }
}

object EquivGenerator extends DefaultJsonProtocol
{
   /** Builds a generator from its JSON representation. */
   def apply(value: JsValue): EquivGenerator =
   {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map(_.convertTo[String])

      val a = fields("a") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val b = fields("b") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      new EquivGenerator(name, a, b)
   }
}
Example 83
Source File: GreaterThanGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.binary.GreaterThanTimeSeries
import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, _}

/**
 * A generator producing a binary time series that is true when 'a' is greater than 'b'.
 *
 * @param name   an optional name identifying this generator in the model
 * @param a      the left operand (generator name or inline generator)
 * @param b      the right operand (generator name or inline generator)
 * @param strict if defined, whether the comparison is strict; defaults to strict
 */
class GreaterThanGenerator( name: Option[String],
                            val a: Either[String, Generator[Any]],
                            val b: Either[String, Generator[Any]],
                            val strict: Option[Boolean]) extends Generator[Any](name, "greater-than")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      // NOTE(review): these matches are unchecked due to erasure — both operands are assumed numeric.
      val first = Model.generator(generators)(a).timeseries(generators) match {
         case t: TimeSeries[Double] => t
      }
      val second = Model.generator(generators)(b).timeseries(generators) match {
         case t: TimeSeries[Double] => t
      }

      // An absent "strict" flag means a strict comparison.
      new GreaterThanTimeSeries(first, second, strict match {
         case None => true
         case Some(x) => x
      })
   }

   override def toString = s"GreaterThan(${name}, ${a}, ${b}, ${strict})"

   override def equals(o: Any) = o match {
      case that: GreaterThanGenerator => that.name == this.name &&
         that.a == this.a &&
         that.b == this.b &&
         that.strict == this.strict
      case _ => false
   }

   override def toJson: JsValue =
   {
      def operand2json(op: Either[String, Generator[Any]]) = op match {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      val mandatory = Map(
         "a" -> operand2json(a),
         "b" -> operand2json(b),
         "type" -> `type`.toJson
      )

      // Optional fields are serialized only when defined.
      val withName = if (name.isDefined) mandatory.updated("name", name.toJson) else mandatory
      val withStrict = if (strict.isDefined) withName.updated("strict", strict.toJson) else withName

      new JsObject(withStrict)
   }
}

object GreaterThanGenerator extends DefaultJsonProtocol
{
   /** Builds a generator from its JSON representation. */
   def apply(value: JsValue): GreaterThanGenerator =
   {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map(_.convertTo[String])
      val strict = fields.get("strict").map(_.convertTo[Boolean])

      val a = fields("a") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val b = fields("b") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      new GreaterThanGenerator(name, a, b, strict)
   }
}
Example 84
Source File: OrGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.binary.OrTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}

/**
 * A generator producing a binary time series implementing logical disjunction: a or b.
 *
 * @param name an optional name identifying this generator in the model
 * @param a    the left operand (generator name or inline generator)
 * @param b    the right operand (generator name or inline generator)
 */
class OrGenerator(name: Option[String],
                  val a: Either[String, Generator[Any]],
                  val b: Either[String, Generator[Any]]) extends Generator[Any](name, "or")
{
   override def timeseries(generators: (String) => Generator[Any]) =
   {
      // NOTE(review): unchecked matches due to erasure — both operands are assumed boolean.
      val first = Model.generator(generators)(a).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }
      val second = Model.generator(generators)(b).timeseries(generators) match {
         case t: TimeSeries[Boolean] => t
      }

      new OrTimeSeries(first, second)
   }

   override def toString = s"Or(${name}, ${a}, ${b})"

   override def equals(o: Any) = o match {
      case that: OrGenerator => that.name == this.name &&
         that.a == this.a &&
         that.b == this.b
      case _ => false
   }

   override def toJson: JsValue =
   {
      def operand2json(op: Either[String, Generator[Any]]) = op match {
         case Left(s) => s.toJson
         case Right(g) => g.toJson
      }

      val mandatory = Map(
         "a" -> operand2json(a),
         "b" -> operand2json(b),
         "type" -> `type`.toJson
      )

      // The optional name is serialized only when defined.
      val complete = if (name.isDefined) mandatory.updated("name", name.toJson) else mandatory

      new JsObject(complete)
   }
}

object OrGenerator extends DefaultJsonProtocol
{
   /** Builds a generator from its JSON representation. */
   def apply(value: JsValue): OrGenerator =
   {
      val fields = value.asJsObject.fields

      val name = fields.get("name").map(_.convertTo[String])

      val a = fields("a") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      val b = fields("b") match {
         case JsString(s) => Left(s)
         case g => Right(GeneratorFormat.read(g))
      }

      new OrGenerator(name, a, b)
   }
}
Example 85
Source File: TrueGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.binary

import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.binary.TrueTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}

/** A generator producing a [[TrueTimeSeries]], declared with type "true". */
class TrueGenerator(name: Option[String]) extends Generator[Boolean](name, "true")
{
  override def timeseries(generators: (String) => Generator[Any]) = new TrueTimeSeries()

  override def toString = s"True($name)"

  override def equals(o: Any) = o match {
    case that: TrueGenerator => that.name == this.name
    case _ => false
  }

  override def toJson: JsValue =
  {
    val base = Map("type" -> `type`.toJson)
    // Only serialize the name when one has been assigned.
    new JsObject(name.fold(base)(n => base + ("name" -> n.toJson)))
  }
}

object TrueGenerator
{
  /** Builds a [[TrueGenerator]] from its JSON representation. */
  def apply(value: JsValue): TrueGenerator =
    new TrueGenerator(value.asJsObject.fields.get("name").map { case JsString(x) => x })
}
Example 86
Source File: TimeShiftGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.composite

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.composite.TimeShiftTimeSeries
import com.github.nscala_time.time.Imports._
import org.joda.time.Duration
import spray.json.{JsObject, JsString, JsValue, _}

/**
 * A generator building a [[TimeShiftTimeSeries]]: the series of an underlying
 * generator, shifted by a fixed duration.
 *
 * @param name      optional name under which this generator is registered
 * @param generator the underlying generator: a reference (name) or an inline definition
 * @param shift     the shift forwarded to TimeShiftTimeSeries
 */
class TimeShiftGenerator(name: Option[String],
                         val generator: Either[String, Generator[Any]],
                         val shift: Duration) extends Generator[Any](name, "time-shift") with TimeToJson
{
  override def timeseries(generators: (String) => Generator[Any]) =
  {
    val ts = Model.generator(generators)(generator).timeseries(generators)
    TimeShiftTimeSeries(ts, shift)
  }

  override def toString = "TimeShift(" + name + ", " + generator + ", " + shift.getMillis + ")"

  override def equals(o: Any) = o match {
    // BUG FIX: the underlying generator is now part of the comparison; previously two
    // shifts of different generators with the same name and shift compared equal,
    // unlike every other generator in this package.
    case that: TimeShiftGenerator =>
      that.name == this.name && that.generator == this.generator && that.shift == this.shift
    case _ => false
  }

  override def toJson: JsValue =
  {
    var t = Map(
      "generator" -> either2json(generator),
      "shift" -> DurationFormat.write(shift),
      "type" -> `type`.toJson
    )

    // The name entry is only serialized when present.
    if (name.isDefined) t = t.updated("name", name.toJson)

    new JsObject(t)
  }
}

object TimeShiftGenerator extends DefaultJsonProtocol with TimeToJson
{
  /** Builds a [[TimeShiftGenerator]] from its JSON representation. */
  def apply(value: JsValue): TimeShiftGenerator =
  {
    val fields = value.asJsObject.fields

    val name = fields.get("name").map(_.convertTo[String])
    val generator = fields("generator") match {
      case JsString(s) => Left(s)
      case g => Right(GeneratorFormat.read(g))
    }
    val shift = fields("shift").convertTo[Duration]

    new TimeShiftGenerator(name, generator, shift)
  }
}
Example 87
Source File: CorrelatedGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.composite

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.composite.CorrelatedTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}

import scala.util.Random

/**
 * A generator building a [[CorrelatedTimeSeries]] from an underlying generator and a
 * correlation coefficient.
 *
 * @param name      optional name under which this generator is registered
 * @param generator the underlying generator: a reference (name) or an inline definition
 * @param coef      correlation coefficient forwarded to CorrelatedTimeSeries
 */
class CorrelatedGenerator(name: Option[String],
                          val generator: Either[String, Generator[Any]],
                          val coef: Double) extends Generator[Double](name, "correlated")
{
  override def timeseries(generators: (String) => Generator[Any]) =
  {
    Model.generator(generators)(generator) match {
      // NOTE(review): due to type erasure this case matches any Generator, so the
      // ClassCastException branch is effectively unreachable — TODO confirm intent.
      case dDouble: Generator[Double] =>
        // A fresh random seed is drawn on every call, so each built series gets its own seed.
        CorrelatedTimeSeries(dDouble.timeseries(generators), Random.nextInt(), coef)
      case _ => throw new ClassCastException
    }
  }

  override def toString = "Correlated(" + name + ", " + generator + ", " + coef + ")"

  override def equals(o: Any) = o match {
    case that: CorrelatedGenerator =>
      that.name == this.name && that.generator == this.generator && that.coef == this.coef
    case _ => false
  }

  override def toJson: JsValue =
  {
    val t = Map(
      "type" -> `type`.toJson,
      "generator" -> either2json(generator),
      "coef" -> coef.toJson
    )

    new JsObject(name.map(n => t + ("name" -> n.toJson)).getOrElse(t))
  }
}

object CorrelatedGenerator extends DefaultJsonProtocol
{
  /** Builds a [[CorrelatedGenerator]] from its JSON representation. */
  def apply(value: JsValue): CorrelatedGenerator =
  {
    val fields = value.asJsObject.fields

    val name = fields.get("name").map { case JsString(x) => x }
    // FIX: removed the unused read of the "type" field (dead code).
    val generator = fields("generator") match {
      case JsString(s) => Left(s)
      case g => Right(GeneratorFormat.read(g))
    }
    val coef = fields("coef").convertTo[Double]

    new CorrelatedGenerator(name, generator, coef)
  }
}
Example 88
Source File: FunctionGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.composite

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.composite.FunctionTimeSeries
import spray.json.{DefaultJsonProtocol, JsNumber, JsObject, JsString, JsValue, _}

/**
 * A generator building a [[FunctionTimeSeries]] that applies `slope * value + intercept`
 * to each value of an underlying time series.
 *
 * @param name      optional name under which this generator is registered
 * @param generator the underlying generator: a reference (name) or an inline definition
 * @param slope     multiplicative factor applied to each value
 * @param intercept constant added to each scaled value
 */
class FunctionGenerator(name: Option[String],
                        val generator: Either[String, Generator[Any]],
                        val slope: Double,
                        val intercept: Double) extends Generator[Double](name, "function")
{
  override def timeseries(generators: String => Generator[Any]) =
  {
    Model.generator(generators)(generator) match {
      // Could also be expressed as a Sum(Times(generator, Constant(slope)), intercept).
      // NOTE(review): due to type erasure this case matches any Generator, so the
      // ClassCastException branch is effectively unreachable — TODO confirm intent.
      case g: Generator[Double] =>
        FunctionTimeSeries[Double](g.timeseries(generators), (t, v) => Some(slope * v + intercept))
      case _ => throw new ClassCastException
    }
  }

  override def toString = "Function(" + name + ", " + generator + ", " + slope + ", " + intercept + ")"

  override def equals(o: Any) = o match {
    case that: FunctionGenerator =>
      (that.name == this.name &&
       that.generator == this.generator &&
       that.slope == this.slope &&
       that.intercept == this.intercept)
    case _ => false
  }

  override def toJson: JsValue =
  {
    val t = Map(
      "type" -> `type`.toJson,
      "generator" -> either2json(generator),
      "slope" -> slope.toJson,
      "intercept" -> intercept.toJson
    )

    new JsObject(name.map(n => t + ("name" -> n.toJson)).getOrElse(t))
  }
}

object FunctionGenerator
{
  /** Builds a [[FunctionGenerator]] from its JSON representation. */
  def apply(json: JsValue): FunctionGenerator =
  {
    val fields = json.asJsObject.fields

    // FIX: reuse the already-extracted field map instead of re-parsing the JSON object.
    val name = fields.get("name").map { case JsString(x) => x }
    val generator = fields("generator") match {
      case JsString(s) => Left(s)
      case g => Right(GeneratorFormat.read(g))
    }
    // A non-numeric "slope" or "intercept" triggers a MatchError, as before.
    val slope = fields("slope") match { case JsNumber(n) => n.toDouble }
    val intercept = fields("intercept") match { case JsNumber(n) => n.toDouble }

    new FunctionGenerator(name, generator, slope, intercept)
  }
}
Example 89
Source File: BinaryTransitionGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.composite

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.composite.TransitionTimeSeries
import org.joda.time.{Duration, LocalDateTime}
import spray.json.{JsObject, JsString, JsValue, _}

/**
 * A generator building a boolean [[TransitionTimeSeries]] switching from a first
 * series to a second one at a given time.
 *
 * @param name   optional name under which this generator is registered
 * @param first  the series used before the transition time
 * @param second the series used from the transition time on
 * @param time   the moment of the transition
 */
class BinaryTransitionGenerator(name: Option[String],
                                val first: Either[String, Generator[Any]],
                                val second: Either[String, Generator[Any]],
                                val time: LocalDateTime) extends Generator[Boolean](name, "binary-transition")
{
  override def timeseries(generators: (String) => Generator[Any]) =
  {
    // NOTE(review): these TimeSeries[Boolean] matches are unchecked due to type erasure.
    val firstBase = Model.generator(generators)(first).timeseries(generators) match {
      case t: TimeSeries[Boolean] => t
    }
    val secondBase = Model.generator(generators)(second).timeseries(generators) match {
      case t: TimeSeries[Boolean] => t
    }

    // The None forwarded here is the optional transition interval, left unset.
    TransitionTimeSeries[Boolean](firstBase, secondBase, time, None)
  }

  override def toString = "BinaryTransitionGenerator(" + name + "," + first + "," + second + "," + time + ")"

  override def equals(o: Any) = o match {
    case that: BinaryTransitionGenerator =>
      that.name == this.name &&
      that.first == this.first &&
      that.second == this.second &&
      that.time == this.time
    case _ => false
  }

  override def toJson: JsValue =
  {
    // FIX: serialize the operands with either2json directly; the previous code converted
    // each operand to JSON and then called .toJson on the already-built JsValue
    // (a redundant identity pass).
    var t = Map(
      "type" -> `type`.toJson,
      "first" -> either2json(first),
      "second" -> either2json(second),
      "time" -> time.toJson
    )

    if (name.isDefined) t = t.updated("name", name.get.toJson)

    new JsObject(t)
  }
}

object BinaryTransitionGenerator extends TimeToJson
{
  /** Builds a [[BinaryTransitionGenerator]] from its JSON representation. */
  def apply(value: JsValue): BinaryTransitionGenerator =
  {
    val fields = value.asJsObject.fields

    val name = fields.get("name").map { case JsString(x) => x }
    val first = fields("first") match {
      case JsString(s) => Left(s)
      case g => Right(GeneratorFormat.read(g))
    }
    val second = fields("second") match {
      case JsString(s) => Left(s)
      case g => Right(GeneratorFormat.read(g))
    }
    val time = fields("time").convertTo[LocalDateTime]

    new BinaryTransitionGenerator(name, first, second, time)
  }
}
Example 90
Source File: DivideGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.composite

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.composite.DivideTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}

/**
 * A generator building a [[DivideTimeSeries]] from two underlying double time series.
 *
 * @param name        optional name under which this generator is registered
 * @param numerator   dividend operand: a reference (name) or an inline generator
 * @param denominator divisor operand, same convention as `numerator`
 */
class DivideGenerator(name: Option[String],
                      val numerator: Either[String, Generator[Any]],
                      val denominator: Either[String, Generator[Any]]) extends Generator[Double](name, "divide")
{
  override def timeseries(gen: String => Generator[Any]) =
  {
    // Resolves an operand (reference or inline) to its underlying double time series.
    def asDoubleSeries(operand: Either[String, Generator[Any]]) =
      Model.generator(gen)(operand).timeseries(gen) match {
        case series: TimeSeries[Double] => series
      }

    new DivideTimeSeries(asDoubleSeries(numerator), asDoubleSeries(denominator))
  }

  override def toString = s"Divide($name, $numerator, $denominator)"

  override def equals(o: Any) = o match {
    case that: DivideGenerator =>
      that.name == this.name && that.numerator == this.numerator && that.denominator == this.denominator
    case _ => false
  }

  override def toJson: JsValue =
  {
    val base = Map(
      "type" -> `type`.toJson,
      "numerator" -> either2json(numerator),
      "denominator" -> either2json(denominator)
    )

    // The name entry is only serialized when present.
    new JsObject(name.map(n => base + ("name" -> n.toJson)).getOrElse(base))
  }
}

object DivideGenerator
{
  /** Builds a [[DivideGenerator]] from its JSON representation. */
  def apply(value: JsValue): DivideGenerator =
  {
    val fields = value.asJsObject.fields

    def operand(key: String) = fields(key) match {
      case JsString(reference) => Left(reference)
      case inline => Right(GeneratorFormat.read(inline))
    }

    new DivideGenerator(
      fields.get("name").map { case JsString(x) => x },
      operand("numerator"),
      operand("denominator"))
  }
}
Example 91
Source File: AggregateGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.composite

import be.cetic.tsimulus.config._
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.composite.AggregationTimeSeries
import spray.json.{JsArray, JsObject, JsString, JsValue, _}

/**
 * A generator building an [[AggregationTimeSeries]] combining several underlying
 * series with the aggregation function named by `aggregator`.
 *
 * @param name       optional name under which this generator is registered
 * @param aggregator name resolved through aggregationFunction
 * @param generators the aggregated operands: references (names) or inline generators
 */
class AggregateGenerator[U](name: Option[String],
                            val aggregator: String,
                            val generators: Seq[Either[String, Generator[Any]]])
  extends Generator[U](name, "aggregate")
{
  override def timeseries(gen: String => Generator[Any]) =
  {
    val combine = aggregationFunction(aggregator)

    val resolved = generators.map {
      case Left(reference) => gen(reference).timeseries(gen)
      case Right(inline) => inline.timeseries(gen)
    }

    // NOTE(review): due to type erasure this flatMap keeps every series, whatever its
    // element type; the `case _ => None` branch is effectively unreachable.
    val doubleSeries = resolved.flatMap {
      case d: TimeSeries[Double] => Some(d)
      case _ => None
    }

    new AggregationTimeSeries[Double, U](combine, doubleSeries)
  }

  override def toString =
    s"Aggregate($name, $aggregator, ${generators.mkString("[", ", ", "]")})"

  override def equals(o: Any) = o match {
    case that: AggregateGenerator[U] =>
      that.name == this.name && that.aggregator == this.aggregator && that.generators == this.generators
    case _ => false
  }

  override def toJson: JsValue =
  {
    val base = Map(
      "type" -> `type`.toJson,
      "aggregator" -> aggregator.toJson,
      "generators" -> generators.map(either2json).toJson
    )

    // The name entry is only serialized when present.
    new JsObject(name.map(n => base + ("name" -> n.toJson)).getOrElse(base))
  }
}

object AggregateGenerator extends DefaultJsonProtocol
{
  /** Builds an [[AggregateGenerator]] from its JSON representation. */
  def apply[U](value: JsValue): AggregateGenerator[U] =
  {
    val fields = value.asJsObject.fields

    val name = fields.get("name").map { case JsString(x) => x }
    val aggregator = fields("aggregator").convertTo[String]
    val generators = fields("generators") match {
      case JsArray(operands) => operands.map {
        case JsString(reference) => Left(reference)
        case inline => Right(GeneratorFormat.read(inline))
      }.toList
    }

    new AggregateGenerator(name, aggregator, generators)
  }
}
Example 92
Source File: ConditionalGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.composite

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.composite.ConditionalTimeSeries
import be.cetic.tsimulus.timeseries.missing.UndefinedTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}

/**
 * A generator building a [[ConditionalTimeSeries]] from a boolean condition series,
 * a success series, and an optional failure series (an [[UndefinedTimeSeries]] is
 * used when no failure branch is configured).
 *
 * @param name      optional name under which this generator is registered
 * @param condition generator of the boolean guard series
 * @param success   generator used when the condition holds
 * @param failure   optional generator used otherwise
 */
class ConditionalGenerator(name: Option[String],
                           val condition: Either[String, Generator[Any]],
                           val success: Either[String, Generator[Any]],
                           val failure: Option[Either[String, Generator[Any]]])
  extends Generator[Any](name, "conditional")
{
  override def timeseries(generators: (String) => Generator[Any]) =
  {
    val cond = Model.generator(generators)(condition).timeseries(generators) match {
      case t: TimeSeries[Boolean] => t
    }
    val a = Model.generator(generators)(success).timeseries(generators) match {
      case t: TimeSeries[Any] => t
    }
    // Fall back to an undefined series when no failure branch is configured.
    val b = failure.map(f => Model.generator(generators)(f).timeseries(generators) match {
      case t: TimeSeries[Any] => t
    }).getOrElse(new UndefinedTimeSeries())

    ConditionalTimeSeries(cond, a, b)
  }

  override def toString = "Conditional(" + name + ", " + condition + ", " + success + ", " + failure + ")"

  override def equals(o: Any) = o match {
    case that: ConditionalGenerator =>
      that.name == this.name &&
      that.condition == this.condition &&
      that.success == this.success &&
      that.failure == this.failure
    case _ => false
  }

  override def toJson: JsValue =
  {
    var t = Map(
      "type" -> `type`.toJson,
      "condition" -> either2json(condition),
      "success" -> either2json(success)
    )

    // FIX: serialize the failure operand with either2json directly; the previous code
    // built the JsValue and then called .toJson on it again (a redundant identity pass).
    if (failure.isDefined) t = t.updated("failure", either2json(failure.get))

    new JsObject(name.map(n => t + ("name" -> n.toJson)).getOrElse(t))
  }
}

object ConditionalGenerator
{
  /** Builds a [[ConditionalGenerator]] from its JSON representation. */
  def apply(value: JsValue): ConditionalGenerator =
  {
    val fields = value.asJsObject.fields

    val name = fields.get("name").map { case JsString(x) => x }
    val condition = fields("condition") match {
      case JsString(s) => Left(s)
      case g => Right(GeneratorFormat.read(g))
    }
    val success = fields("success") match {
      case JsString(s) => Left(s)
      case g => Right(GeneratorFormat.read(g))
    }
    val failure =
      if (fields.contains("failure")) fields("failure") match {
        case JsString(s) => Some(Left(s))
        case g => Some(Right(GeneratorFormat.read(g)))
      }
      else None

    new ConditionalGenerator(name, condition, success, failure)
  }
}
Example 93
Source File: PartialGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.missing

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.missing.PartialTimeSeries
import org.joda.time.LocalDateTime
import spray.json.{JsObject, JsString, JsValue, _}

/**
 * A generator wrapping an underlying series in a [[PartialTimeSeries]] parameterized
 * by an optional [from, to] window and an optional missing rate (semantics defined
 * by PartialTimeSeries).
 *
 * @param name        optional name under which this generator is registered
 * @param generator   the underlying generator: a reference (name) or an inline definition
 * @param from        optional lower bound forwarded to PartialTimeSeries
 * @param to          optional upper bound forwarded to PartialTimeSeries
 * @param missingRate optional rate forwarded to PartialTimeSeries
 */
class PartialGenerator(name: Option[String],
                       val generator: Either[String, Generator[Any]],
                       val from: Option[LocalDateTime],
                       val to: Option[LocalDateTime],
                       val missingRate: Option[Double]) extends Generator[Any](name, "partial")
{
  override def timeseries(generators: (String) => Generator[Any]) =
  {
    val base = Model.generator(generators)(generator).timeseries(generators)
    PartialTimeSeries(base, from, to, missingRate)
  }

  override def toString = s"Partial($name, $generator, $from, $to, $missingRate)"

  override def equals(o: Any) = o match {
    case that: PartialGenerator =>
      that.name == this.name &&
      that.generator == this.generator &&
      that.from == this.from &&
      that.to == this.to &&
      that.missingRate == this.missingRate
    case _ => false
  }

  override def toJson: JsValue =
  {
    val base = Map(
      "type" -> `type`.toJson,
      "generator" -> either2json(generator),
      "from" -> from.toJson,
      "to" -> to.toJson
    )

    // Optional entries are only serialized when present.
    val withRate = missingRate.fold(base)(r => base.updated("missing-rate", r.toJson))
    val withName = name.fold(withRate)(n => withRate.updated("name", n.toJson))

    new JsObject(withName)
  }
}

object PartialGenerator extends DefaultJsonProtocol with TimeToJson
{
  /** Builds a [[PartialGenerator]] from its JSON representation. */
  def apply(value: JsValue): PartialGenerator =
  {
    val fields = value.asJsObject.fields

    val generator = fields("generator") match {
      case JsString(reference) => Left(reference)
      case inline => Right(GeneratorFormat.read(inline))
    }

    new PartialGenerator(
      fields.get("name").map { case JsString(x) => x },
      generator,
      fields.get("from").map(_.convertTo[LocalDateTime]),
      fields.get("to").map(_.convertTo[LocalDateTime]),
      fields.get("missing-rate").map(_.convertTo[Double]))
  }
}
Example 94
Source File: LimitedGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.missing

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.missing.LimitedTimeSeries
import org.joda.time.LocalDateTime
import spray.json.{JsObject, JsString, JsValue, _}

/**
 * A generator wrapping an underlying series in a [[LimitedTimeSeries]] parameterized
 * by an optional [from, to] window.
 *
 * @param name      optional name under which this generator is registered
 * @param generator the underlying generator: a reference (name) or an inline definition
 * @param from      optional lower bound forwarded to LimitedTimeSeries
 * @param to        optional upper bound forwarded to LimitedTimeSeries
 */
class LimitedGenerator(name: Option[String],
                       val generator: Either[String, Generator[Any]],
                       val from: Option[LocalDateTime],
                       val to: Option[LocalDateTime]) extends Generator[Any](name, "limited")
{
  override def timeseries(generators: (String) => Generator[Any]) =
    LimitedTimeSeries(Model.generator(generators)(generator).timeseries(generators), from, to)

  override def toString = "Limited(" + name + ", " + generator + ", " + from + ", " + to + ")"

  override def equals(o: Any) = o match {
    case that: LimitedGenerator =>
      that.name == this.name && that.generator == this.generator &&
      that.from == this.from && that.to == this.to
    case _ => false
  }

  override def toJson: JsValue =
  {
    // BUG FIX: serialize the optional bounds via the Option format (None -> JsNull)
    // instead of calling .get, which threw NoSuchElementException for an unbounded
    // generator. This also matches how PartialGenerator serializes the same fields.
    val t = Map(
      "type" -> `type`.toJson,
      "generator" -> either2json(generator),
      "from" -> from.toJson,
      "to" -> to.toJson
    )

    new JsObject(name.map(n => t + ("name" -> n.toJson)).getOrElse(t))
  }
}

object LimitedGenerator extends DefaultJsonProtocol with TimeToJson
{
  /** Builds a [[LimitedGenerator]] from its JSON representation. */
  def apply(value: JsValue): LimitedGenerator =
  {
    val fields = value.asJsObject.fields

    val name = fields.get("name").map { case JsString(x) => x }
    val generator = fields("generator") match {
      case JsString(s) => Left(s)
      case g => Right(GeneratorFormat.read(g))
    }
    val from = fields.get("from").map(_.convertTo[LocalDateTime])
    val to = fields.get("to").map(_.convertTo[LocalDateTime])
    // FIX: removed the unused read of "missing-rate" (dead code, apparently copied
    // from PartialGenerator).

    new LimitedGenerator(name, generator, from, to)
  }
}
Example 95
Source File: DefaultGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.missing

import be.cetic.tsimulus.config.{GeneratorFormat, Model}
import be.cetic.tsimulus.generators.{Generator, TimeToJson}
import be.cetic.tsimulus.timeseries.TimeSeries
import be.cetic.tsimulus.timeseries.missing.DefaultTimeSeries
import spray.json.{JsArray, JsObject, JsString, JsValue, _}

/**
 * A generator building a [[DefaultTimeSeries]] over several underlying series
 * (declared with type "first-of").
 *
 * @param name optional name under which this generator is registered
 * @param gens the candidate generators: references (names) or inline definitions
 */
class DefaultGenerator(name: Option[String], val gens: Seq[Either[String, Generator[Any]]])
  extends Generator[Any](name, "first-of")
{
  override def timeseries(generators: (String) => Generator[Any]) =
  {
    val underlyings = gens.map(g => Model.generator(generators)(g).timeseries(generators) match {
      case t: TimeSeries[Any] => t
    })

    DefaultTimeSeries(underlyings)
  }

  // BUG FIX: the label previously read "UndefinedGenerator", an apparent copy-paste
  // from that class; it now names this class.
  override def toString = "DefaultGenerator(" + name + "," + gens + ")"

  override def equals(o: Any) = o match {
    // FIX: include the name in the comparison, consistently with every other generator
    // in this package (it was previously ignored).
    case that: DefaultGenerator => that.name == this.name && that.gens == this.gens
    case _ => false
  }

  override def toJson: JsValue =
  {
    val t = Map(
      "type" -> `type`.toJson,
      "generators" -> gens.map(either2json).toJson
    )

    new JsObject(name.map(n => t + ("name" -> n.toJson)).getOrElse(t))
  }
}

object DefaultGenerator
{
  /** Builds a [[DefaultGenerator]] from its JSON representation. */
  def apply(value: JsValue): DefaultGenerator =
  {
    val fields = value.asJsObject.fields

    val name = fields.get("name").map { case JsString(x) => x }
    val generators = fields("generators") match {
      case JsArray(l) => l.map {
        case JsString(s) => Left(s)
        case g => Right(GeneratorFormat.read(g))
      }
    }

    new DefaultGenerator(name, generators)
  }
}
Example 96
Source File: UndefinedGenerator.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators.missing

import be.cetic.tsimulus.generators.Generator
import be.cetic.tsimulus.timeseries.missing.UndefinedTimeSeries
import spray.json.{JsObject, JsString, JsValue, _}

/** A generator producing an [[UndefinedTimeSeries]], declared with type "undefined". */
class UndefinedGenerator(name: Option[String]) extends Generator[Any](name, "undefined")
{
  override def timeseries(generators: (String) => Generator[Any]) = new UndefinedTimeSeries()

  override def toString = s"UndefinedGenerator($name)"

  override def equals(o: Any) = o match {
    case that: UndefinedGenerator => that.name == this.name
    case _ => false
  }

  override def toJson: JsValue =
  {
    val base = Map("type" -> `type`.toJson)
    // Only serialize the name when one has been assigned.
    new JsObject(name.fold(base)(n => base + ("name" -> n.toJson)))
  }
}

object UndefinedGenerator
{
  /** Builds an [[UndefinedGenerator]] from its JSON representation. */
  def apply(value: JsValue): UndefinedGenerator =
    new UndefinedGenerator(value.asJsObject.fields.get("name").map { case JsString(x) => x })
}
Example 97
Source File: TimeToJson.scala From TSimulus with Apache License 2.0 | 5 votes |
package be.cetic.tsimulus.generators

import com.github.nscala_time.time.Imports._
import org.joda.time.format.DateTimeFormatterBuilder
import org.joda.time.{Duration, LocalDateTime, LocalTime}
import spray.json.{JsString, JsValue, RootJsonFormat, _}

/**
 * Mixin providing the spray-json formats used to (de)serialize time-related values
 * (LocalDateTime, LocalTime, Duration) throughout the generator hierarchy.
 */
trait TimeToJson extends DefaultJsonProtocol
{
  // Canonical output patterns; parsing additionally accepts values without milliseconds.
  val dtf = DateTimeFormat.forPattern("YYYY-MM-dd HH:mm:ss.SSS")
  val ttf = DateTimeFormat.forPattern("HH:mm:ss.SSS")

  val datetimeFormatter = {
    val parsers = Array(
      DateTimeFormat.forPattern("YYYY-MM-dd HH:mm:ss.SSS").getParser,
      DateTimeFormat.forPattern("YYYY-MM-dd HH:mm:ss").getParser
    )
    new DateTimeFormatterBuilder().append(null, parsers).toFormatter()
  }

  val timeFormatter = {
    val parsers = Array(
      DateTimeFormat.forPattern("HH:mm:ss.SSS").getParser,
      DateTimeFormat.forPattern("HH:mm:ss").getParser
    )
    new DateTimeFormatterBuilder().append(null, parsers).toFormatter()
  }

  implicit object LocalDateTimeJsonFormat extends RootJsonFormat[LocalDateTime]
  {
    def write(d: LocalDateTime) = JsString(dtf.print(d))
    def read(value: JsValue) = value match {
      case JsString(s) => datetimeFormatter.parseLocalDateTime(s)
      case unrecognized => serializationError(s"Serialization problem $unrecognized")
    }
  }

  implicit object LocalTimeJsonFormat extends RootJsonFormat[LocalTime]
  {
    def write(t: LocalTime) = JsString(ttf.print(t))
    def read(value: JsValue) = value match {
      case JsString(s) => timeFormatter.parseLocalTime(s)
      case unknown => deserializationError(s"unknown LocalTime object: $unknown")
    }
  }

  implicit object DurationFormat extends RootJsonFormat[Duration]
  {
    // Durations are serialized as their length in milliseconds.
    def write(d: Duration) = d.getMillis.toJson
    // BUG FIX: the previous implementation used value.toString.toLong, which fails for
    // JsString values (the surrounding quotes end up in the parsed text) and for any
    // non-integral rendering. Match on the actual JSON type instead.
    def read(value: JsValue) = value match {
      case JsNumber(n) => new Duration(n.toLong)
      case JsString(s) => new Duration(s.trim.toLong)
      case unrecognized => deserializationError(s"Expected a duration in milliseconds, got $unrecognized")
    }
  }

  /** Serializes an operand: a reference as a JSON string, an inline generator via its own toJson. */
  def either2json(element: Either[String, Generator[Any]]) = element match {
    case Left(s) => s.toJson
    case Right(g) => g.toJson
  }
}
Example 98
Source File: WorkflowMetadataConverter.scala From seahorse with Apache License 2.0 | 5 votes |
package ai.deepsense.workflowmanager.versionconverter

import spray.json.{JsObject, JsString, JsValue}

import ai.deepsense.commons.utils.Version

/** Utilities for rewriting the metadata section of a serialized workflow. */
object WorkflowMetadataConverter {

  /** JSON field names and well-known identifiers used by the version converters. */
  object Js {
    val apiVersion14 = "1.4.0"
    val apiVersion13 = "1.3.0"
    val apiVersion12 = "1.2.0"

    val evaluate13id = "a88eaf35-9061-4714-b042-ddd2049ce917"
    val fit13id = "0c2ff818-977b-11e5-8994-feff819cdc9f"
    val fitPlusTransform13id = "1cb153f1-3731-4046-a29b-5ad64fde093f"
    val gridSearch13id = "9163f706-eaaf-46f6-a5b0-4114d92032b7"
    val transform13id = "643d8706-24db-4674-b5b4-10b5129251fc"
    val customTransformer13id = "65240399-2987-41bd-ba7e-2944d60a3404"
    val readDataFrame13Id = "c48dd54c-6aef-42df-ad7a-42fc59a09f0e"
    val writeDataFrame13Id = "9e460036-95cc-42c5-ba64-5bc767a40e4e"
    val readDatasource14Id = "1a3b32f0-f56d-4c44-a396-29d2dfd43423"
    val writeDatasource14Id = "bf082da2-a0d9-4335-a62f-9804217a1436"

    val readDatasourceName = "Read Datasource"
    val writeDatasourceName = "Write Datasource"

    val apiVersion = "apiVersion"
    val connections = "connections"
    val id = "id"
    val name = "name"
    val innerWorkflow = "inner workflow"
    val metadata = "metadata"
    val nodeId = "nodeId"
    val nodes = "nodes"
    val operation = "operation"
    val parameters = "parameters"
    val portIndex = "portIndex"
    val to = "to"
    val workflow = "workflow"
    val datasourceId = "data source"
  }

  /** Returns `workflow` with its metadata's apiVersion field replaced by `targetVersion`. */
  def setWorkflowVersion(workflow: JsValue, targetVersion: Version): JsValue = {
    val workflowFields = workflow.asJsObject.fields
    // A workflow without a metadata section gets an empty one created on the fly.
    val currentMetadata = workflowFields.getOrElse(Js.metadata, JsObject()).asJsObject
    val updatedMetadata = convertMetadata(currentMetadata, targetVersion.humanReadable)
    JsObject(workflowFields + (Js.metadata -> updatedMetadata))
  }

  /** Returns `metadata` with its apiVersion field set to `targetVersion`. */
  def convertMetadata(metadata: JsValue, targetVersion: String): JsValue =
    JsObject(metadata.asJsObject.fields.updated(Js.apiVersion, JsString(targetVersion)))
}
Example 99
Source File: FileEntry.scala From seahorse with Apache License 2.0 | 5 votes |
package ai.deepsense.libraryservice

import java.io.File

import spray.json.{DefaultJsonProtocol, DeserializationException, JsString, JsValue, JsonFormat}

import ai.deepsense.commons.json.EnumerationSerializer._

/**
 * A node of the library's file tree.
 *
 * @param name     simple (base) name of the file or directory
 * @param kind     whether the entry is a file or a directory
 * @param children entries of a directory (empty for files), sorted by (isFile, name)
 */
case class FileEntry private[libraryservice] (
    name: String,
    kind: FileType.Value,
    children: Seq[FileEntry])

object FileEntry {
  /** Wraps a regular file; files have no children. */
  def fromFile(file: java.io.File): FileEntry = {
    FileEntry(file.getName, FileType.File, Nil)
  }

  /** Recursively wraps a directory; children sorted by (isFile, name), i.e. directories first. */
  def fromDirectory(directory: java.io.File): FileEntry = {
    // BUG FIX: File.listFiles returns null (not an empty array) when the path is not a
    // directory or an I/O error occurs; guard against the resulting NullPointerException.
    val entries = Option(directory.listFiles).getOrElse(Array.empty[File])
    val sortedFiles = entries.sortBy(f => (f.isFile, f.getName))
    val children = sortedFiles.map(fileToFileEntry)
    FileEntry(directory.getName, FileType.Directory, children)
  }

  /** Dispatches on file vs directory. */
  def fileToFileEntry(f: File): FileEntry = {
    if (f.isFile) {
      fromFile(f)
    } else {
      fromDirectory(f)
    }
  }
}

object FileType extends Enumeration {
  val File = Value("file")
  val Directory = Value("directory")

  // JSON (de)serialization for the enumeration, provided by EnumerationSerializer.
  implicit val fileTypeJsonFormat = jsonEnumFormat(FileType)
}

object FileEntryJsonProtocol extends DefaultJsonProtocol {
  // lazyFormat breaks the recursion between FileEntry and its children.
  implicit val fileEntryJsonFormat: JsonFormat[FileEntry] =
    lazyFormat(jsonFormat3(FileEntry.apply))
}
Example 100
Source File: GlobalMQDeserializerSpec.scala From seahorse with Apache License 2.0 | 5 votes |
package ai.deepsense.workflowexecutor.communication.mq.json

import java.nio.charset.StandardCharsets

import org.scalatest.mockito.MockitoSugar
import spray.json.{JsArray, JsNull, JsObject, JsString}

import ai.deepsense.commons.StandardSpec
import ai.deepsense.models.workflows.Workflow
import ai.deepsense.workflowexecutor.communication.message.global._
import ai.deepsense.workflowexecutor.communication.mq.json.Global.GlobalMQDeserializer

/** Verifies that GlobalMQDeserializer turns raw JSON messages into the proper message objects. */
class GlobalMQDeserializerSpec
  extends StandardSpec
  with MockitoSugar {

  // Serializes the JSON message to UTF-8 bytes and feeds it to the deserializer,
  // mimicking a message arriving from the queue.
  private def serializeAndRead(rawMessage: JsObject): Any = {
    val payload = rawMessage.compactPrint.getBytes(StandardCharsets.UTF_8)
    GlobalMQDeserializer.deserializeMessage(payload)
  }

  "GlobalMQDeserializer" should {
    "deserialize Launch messages" in {
      val workflowId = Workflow.Id.randomId
      val nodesToExecute = Vector(Workflow.Id.randomId, Workflow.Id.randomId, Workflow.Id.randomId)

      val message = JsObject(
        "messageType" -> JsString("launch"),
        "messageBody" -> JsObject(
          "workflowId" -> JsString(workflowId.toString),
          "nodesToExecute" -> JsArray(nodesToExecute.map(id => JsString(id.toString)))))

      serializeAndRead(message) shouldBe Launch(workflowId, nodesToExecute.toSet)
    }

    "deserialize Heartbeat messages" in {
      val workflowId = "foo-workflow"

      val message = JsObject(
        "messageType" -> JsString("heartbeat"),
        "messageBody" -> JsObject(
          "workflowId" -> JsString(workflowId),
          "sparkUiAddress" -> JsNull))

      serializeAndRead(message) shouldBe Heartbeat(workflowId, None)
    }

    "deserialize PoisonPill messages" in {
      val message = JsObject(
        "messageType" -> JsString("poisonPill"),
        "messageBody" -> JsObject())

      serializeAndRead(message) shouldBe PoisonPill()
    }

    "deserialize Ready messages" in {
      val sessionId = "foo-session"

      val message = JsObject(
        "messageType" -> JsString("ready"),
        "messageBody" -> JsObject(
          "sessionId" -> JsString(sessionId)))

      serializeAndRead(message) shouldBe Ready(sessionId)
    }
  }
}
Example 101
Source File: ParamsSerialization.scala From seahorse with Apache License 2.0 | 5 votes |
package ai.deepsense.deeplang.doperables.serialization

import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue}

import ai.deepsense.deeplang.catalogs.doperable.exceptions.NoParameterlessConstructorInClassException
import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.{CatalogRecorder, ExecutionContext, TypeUtils}
import ai.deepsense.models.json.graph.GraphJsonProtocol.GraphReader

/**
 * Persists and restores the parameters of a Params instance, together with a small
 * metadata file recording the concrete class name for later reconstruction.
 */
trait ParamsSerialization {
  self: Params =>

  /** Writes both the metadata file and the parameter values under `path`. */
  def saveObjectWithParams(ctx: ExecutionContext, path: String): Unit = {
    saveMetadata(ctx, path)
    saveParams(ctx, path)
  }

  /** Reads previously saved parameter values from `path` and applies them to this instance. */
  def loadAndSetParams(ctx: ExecutionContext, path: String): this.type = {
    setParams(loadParams(ctx, path), ctx.inferContext.graphReader)
  }

  // Stores the concrete class name so ParamsSerialization.load can re-instantiate it.
  protected def saveMetadata(ctx: ExecutionContext, path: String) = {
    val metadataJson = JsObject(
      ParamsSerialization.classNameKey -> JsString(this.getClass.getName))
    JsonObjectPersistence.saveJsonToFile(
      ctx, ParamsSerialization.metadataFilePath(path), metadataJson)
  }

  protected def saveParams(ctx: ExecutionContext, path: String): Unit = {
    JsonObjectPersistence.saveJsonToFile(
      ctx, ParamsSerialization.paramsFilePath(path), paramValuesToJson)
  }

  protected def loadParams(ctx: ExecutionContext, path: String): JsValue = {
    JsonObjectPersistence.loadJsonFromFile(ctx, ParamsSerialization.paramsFilePath(path))
  }

  private def setParams(paramsJson: JsValue, graphReader: GraphReader): this.type = {
    this.set(paramPairsFromJson(paramsJson, graphReader): _*)
  }
}

object ParamsSerialization {
  val classNameKey = "className"
  val paramsFileName = "params"
  val metadataFileName = "metadata"

  /**
   * Re-creates the object previously saved under `path`: reads the class name from the
   * metadata file, instantiates it through its parameterless constructor and delegates
   * the actual loading to the new instance.
   */
  def load(ctx: ExecutionContext, path: String): Loadable = {
    import DefaultJsonProtocol._
    val metadataJson: JsObject =
      JsonObjectPersistence.loadJsonFromFile(ctx, metadataFilePath(path)).asJsObject
    val className = metadataJson.fields(classNameKey).convertTo[String]
    // NOTE(review): the class name is read from a file and instantiated reflectively;
    // callers are trusted to control that file.
    val clazz: Class[_] = Class.forName(className)
    val constructor = TypeUtils.constructorForClass(clazz)
      .getOrElse(throw new NoParameterlessConstructorInClassException(clazz.getCanonicalName))
    val loadable = TypeUtils.createInstance(constructor).asInstanceOf[Loadable]
    loadable.load(ctx, path)
  }

  def metadataFilePath(path: String): String =
    PathsUtils.combinePaths(path, metadataFileName)

  def paramsFilePath(path: String): String =
    PathsUtils.combinePaths(path, paramsFileName)
}
Example 102
Source File: WorkflowParamSpec.scala From seahorse with Apache License 2.0 | 5 votes |
package ai.deepsense.deeplang.params

import spray.json.{JsString, _}

import ai.deepsense.deeplang.DOperationCategories
import ai.deepsense.deeplang.catalogs.SortPriority
import ai.deepsense.deeplang.catalogs.doperations.DOperationsCatalog
import ai.deepsense.deeplang.doperations.custom.{Sink, Source}
import ai.deepsense.deeplang.params.custom.InnerWorkflow
import ai.deepsense.models.json.graph.GraphJsonProtocol.GraphReader

/** Spec verifying JSON (de)serialization fixtures for [[WorkflowParam]]. */
class WorkflowParamSpec extends AbstractParamSpec[InnerWorkflow, WorkflowParam] {

  override def className: String = "WorkflowParam"

  // Reader backed by a minimal catalog containing just the Source and Sink
  // operations that an empty inner workflow refers to.
  override def graphReader: GraphReader = {
    val catalog = DOperationsCatalog()
    catalog.registerDOperation(DOperationCategories.IO, () => Source(), SortPriority.coreDefault)
    catalog.registerDOperation(DOperationCategories.IO, () => Sink(), SortPriority.coreDefault)
    new GraphReader(catalog)
  }

  override def paramFixture: (WorkflowParam, JsValue) = {
    val descriptionText = "Workflow parameter description"
    val workflowParam = WorkflowParam(
      name = "Workflow parameter name",
      description = Some(descriptionText))
    val expectedJson = JsObject(
      "type" -> JsString("workflow"),
      "name" -> JsString(workflowParam.name),
      "description" -> JsString(descriptionText),
      "isGriddable" -> JsFalse,
      "default" -> JsNull
    )
    (workflowParam, expectedJson)
  }

  override def valueFixture: (InnerWorkflow, JsValue) = {
    val innerWorkflow = InnerWorkflow.empty

    // JSON for a workflow node: its id, operation descriptor and (empty) parameters.
    def nodeJson(nodeId: String, opId: String, opName: String): JsObject = JsObject(
      "id" -> JsString(nodeId),
      "operation" -> JsObject(
        "id" -> JsString(opId),
        "name" -> JsString(opName)
      ),
      "parameters" -> JsObject()
    )

    val workflowJson = JsObject(
      "nodes" -> JsArray(
        nodeJson(innerWorkflow.source.id.toString, Source.id.toString, "Source"),
        nodeJson(innerWorkflow.sink.id.toString, Sink.id.toString, "Sink")),
      "connections" -> JsArray()
    )
    val expectedValue = JsObject(
      "workflow" -> workflowJson,
      "thirdPartyData" -> JsObject(),
      "publicParams" -> JsArray()
    )
    (innerWorkflow, expectedValue)
  }
}
Example 103
Source File: LogCollectorSpec.scala From shield with MIT License | 5 votes |
package shield.actors.listeners

import akka.actor.{ActorRef, ActorSystem}
import akka.testkit.{ImplicitSender, TestKit}
import org.scalatest.WordSpecLike
import org.specs2.matcher.MustMatchers
import shield.config.{DomainSettings, Settings}
import spray.http.HttpHeaders.RawHeader
import spray.http.HttpRequest
import spray.json.JsString

/** Spec for [[LogCollector]]'s header extraction into access logs. */
class LogCollectorSpec extends TestKit(ActorSystem("testSystem"))
  // Using the ImplicitSender trait will automatically set `testActor` as the sender
  with ImplicitSender
  with WordSpecLike
  with MustMatchers {

  import akka.testkit.TestActorRef

  val settings = Settings(system)
  val domainSettings =
    new DomainSettings(settings.config.getConfigList("shield.domains").get(0), system)

  // TestActorRef gives synchronous access to the actor's internals.
  val actorRef = TestActorRef(new LogCollector("1", domainSettings, Seq[ActorRef](), 5))
  val actor = actorRef.underlyingActor

  "LogCollector" should {
    "Extracts headers and adds them to access logs" in {
      // Six raw headers on the request, four names requested; "DNE" is not
      // present, so exactly three entries are expected back.
      val request = HttpRequest().withHeaders(
        RawHeader("sample", "header"),
        RawHeader("test", "test1"),
        RawHeader("test2", "123"),
        RawHeader("test-header-3", "abc"),
        RawHeader("hh", "aaa"),
        RawHeader("hhh", "bbb")
      )

      val extractedHeaders =
        actor.extractHeaders(request.headers, Set("test-header-3", "hh", "sample", "DNE"))

      extractedHeaders.keys.size must be equalTo 3
      extractedHeaders.get("hh").get must be equalTo JsString("aaa")
      extractedHeaders.get("test-header-3").get must be equalTo JsString("abc")
      extractedHeaders.get("sample").get must be equalTo JsString("header")
    }
  }
}
Example 104
Source File: JobDetailsRecord.scala From mist with Apache License 2.0 | 5 votes |
package io.hydrosphere.mist.master

import io.hydrosphere.mist.core.CommonData.{Action, JobParams}
import io.hydrosphere.mist.master.interfaces.JsonCodecs
import JsonCodecs._
import mist.api.data.{JsData, JsMap}
import spray.json.{JsObject, JsString, enrichAny, enrichString}

/**
 * Flat, string-encoded representation of a [[JobDetails]] suitable for
 * storage: parameters and job results are kept as compact JSON strings.
 */
case class JobDetailsRecord(
  path: String,
  className: String,
  namespace: String,
  parameters: String,
  externalId: Option[String],
  function: Option[String],
  action: String,
  source: String,
  jobId: String,
  startTime: Option[Long],
  endTime: Option[Long],
  jobResult: Option[String],
  status: String,
  workerId: Option[String],
  createTime: Long) {

  /** Rebuilds the domain object, parsing the JSON-encoded fields back. */
  def toJobDetails: JobDetails = {
    JobDetails(
      function.get, jobId,
      JobParams(path, className, parameters.parseJson.convertTo[JsMap], toAction(action)),
      namespace, externalId, JobDetails.Source(source), startTime, endTime,
      toResult(jobResult), JobDetails.Status(status), workerId, createTime)
  }

  /**
   * Decodes a stored job result: `{"error": msg}` becomes `Left(msg)`,
   * `{"result": data}` becomes `Right(data)`, and a bare JSON string is
   * treated as an error message.
   *
   * @throws IllegalArgumentException for any other JSON shape (previously a
   *         bare NoSuchElementException leaked from `Option.get` when an
   *         object had neither "error" nor "result").
   */
  def toResult(stringOption: Option[String]): Option[Either[String, JsData]] = {
    stringOption.map { string =>
      string.parseJson match {
        case JsObject(fields) =>
          fields.get("error") match {
            // Only a string-valued "error" counts as an error payload.
            case Some(JsString(err)) => Left(err)
            case _ =>
              val result = fields.getOrElse("result",
                throw new IllegalArgumentException(
                  s"can not deserialize $string to Job response"))
              Right(result.convertTo[JsData])
          }
        case JsString(err) => Left(err)
        case _ =>
          throw new IllegalArgumentException(s"can not deserialize $string to Job response")
      }
    }
  }

  // "train" is a legacy alias for Execute; unknown action strings also
  // default to Execute, matching the original fallthrough behavior.
  def toAction(action: String) = action match {
    case "serve" => Action.Serve
    case _ => Action.Execute
  }
}

object JobDetailsRecord {

  /** Converts a domain [[JobDetails]] into its storable record form. */
  def apply(jd: JobDetails): JobDetailsRecord = {
    val jp: JobParams = jd.params
    new JobDetailsRecord(
      jp.filePath, jp.className, jd.context, jp.arguments.toJson.compactPrint,
      jd.externalId, Some(jd.function), jp.action.toString, jd.source.toString,
      jd.jobId, jd.startTime, jd.endTime, jobResponseToString(jd.jobResult),
      jd.status.toString, jd.workerId, jd.createTime)
  }

  /** Encodes a job outcome as `{"error": ...}` or `{"result": ...}` compact JSON. */
  def jobResponseToString(jobResponseOrError: Option[Either[String, JsData]]): Option[String] = {
    jobResponseOrError.map {
      case Left(err)   => JsObject("error" -> JsString(err)).compactPrint
      case Right(data) => JsObject("result" -> data.toJson).compactPrint
    }
  }
}
Example 105
Source File: EchoEnumService.scala From swagger-akka-http-sample with Apache License 2.0 | 5 votes |
package com.example.akka.echoenum

import akka.http.scaladsl.server.{Directives, Route}
import com.example.akka.DefaultJsonFormats
import com.fasterxml.jackson.core.`type`.TypeReference
import com.fasterxml.jackson.module.scala.JsonScalaEnumeration
import io.swagger.v3.oas.annotations.Operation
import io.swagger.v3.oas.annotations.media.{Content, Schema}
import io.swagger.v3.oas.annotations.parameters.RequestBody
import io.swagger.v3.oas.annotations.responses.ApiResponse
import javax.ws.rs.core.MediaType
import javax.ws.rs.{Consumes, POST, Path, Produces}
import spray.json.{DeserializationException, JsString, JsValue, RootJsonFormat}

/** HTTP service that echoes back a payload containing an enumeration value. */
@Path("/echoenum")
object EchoEnumService extends Directives with DefaultJsonFormats {

  //case class EchoEnum(@Schema(required = true, `type` = "string", allowableValues = Array("TALL", "GRANDE", "VENTI"))
  //                    enumValue: SizeEnum.Value)
  // TypeReference lets Jackson resolve the Scala Enumeration's concrete type.
  class SizeEnumTypeClass extends TypeReference[SizeEnum.type]
  case class EchoEnum(@JsonScalaEnumeration(classOf[SizeEnumTypeClass]) enumValue: SizeEnum.Value)

  // Round-trips the enumeration through its string name in spray-json.
  implicit val enumFormat: RootJsonFormat[SizeEnum.Value] =
    new RootJsonFormat[SizeEnum.Value] {
      def write(obj: SizeEnum.Value): JsValue = JsString(obj.toString)

      def read(json: JsValue): SizeEnum.Value = json match {
        case JsString(txt) => SizeEnum.withName(txt)
        case somethingElse =>
          throw DeserializationException(
            s"Expected a value from enum $SizeEnum instead of $somethingElse")
      }
    }

  implicit val echoEnumFormat: RootJsonFormat[EchoEnum] = jsonFormat1(EchoEnum)

  val route: Route = echo

  @POST
  @Consumes(Array(MediaType.APPLICATION_JSON))
  @Produces(Array(MediaType.APPLICATION_JSON))
  @Operation(summary = "Echo Enum", description = "Echo Enum",
    requestBody = new RequestBody(content = Array(
      new Content(schema = new Schema(implementation = classOf[EchoEnum])))),
    responses = Array(
      new ApiResponse(responseCode = "200", description = "Echo Enum",
        content = Array(new Content(schema = new Schema(implementation = classOf[EchoEnum])))),
      new ApiResponse(responseCode = "400", description = "Bad Request"))
  )
  def echo: Route =
    path("echoenum") {
      post {
        entity(as[EchoEnum]) { request =>
          complete(request)
        }
      }
    }
}
Example 106
Source File: TransformersSpec.scala From streamliner-starter with Apache License 2.0 | 5 votes |
package test

import com.memsql.spark.etl.api.UserTransformConfig
import com.memsql.spark.etl.utils.ByteUtils
import com.memsql.streamliner.starter.BasicTransformer
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.types._
import spray.json.JsString

/** Specs for [[BasicTransformer]]: even-number filtering and input-type validation. */
class TransformersSpec extends UnitSpec with LocalSparkContext {
  val emptyConfig = UserTransformConfig(class_name = "Test", value = JsString("empty"))
  val logger = new TestLogger("test")

  var sqlContext: SQLContext = _

  override def beforeEach(): Unit = {
    super.beforeEach()
    sqlContext = new SQLContext(sc)
  }

  // Builds a single-column DataFrame with the given schema over the sample ints.
  private def frameOf(schema: StructType, data: List[Int]) = {
    val rows = sqlContext.sparkContext.parallelize(data).map(Row(_))
    sqlContext.createDataFrame(rows, schema)
  }

  "BasicTransformer" should "only emit even numbers" in {
    val transform = new BasicTransformer
    val schema = StructType(StructField("number", IntegerType, false) :: Nil)
    val dfIn = frameOf(schema, List(1, 2, 3))

    val df = transform.transform(sqlContext, dfIn, emptyConfig, logger)

    // Only the single even value survives; the schema is passed through.
    assert(df.schema == schema)
    assert(df.first == Row(2))
    assert(df.count == 1)
  }

  "BasicTransformer" should "only accept IntegerType fields" in {
    val transform = new BasicTransformer
    val schema = StructType(StructField("column", StringType, false) :: Nil)
    val dfIn = frameOf(schema, List(1, 2, 3))

    val e = intercept[IllegalArgumentException] {
      transform.transform(sqlContext, dfIn, emptyConfig, logger)
    }
    assert(e.getMessage() == "The first column of the input DataFrame should be IntegerType")
  }
}
Example 107
Source File: ExtractorsSpec.scala From streamliner-starter with Apache License 2.0 | 5 votes |
package test

import com.memsql.spark.etl.api.UserExtractConfig
import com.memsql.spark.etl.utils.ByteUtils
import spray.json.JsString
import com.memsql.streamliner.starter.BasicExtractor
import org.apache.spark.streaming._
import org.apache.spark.sql.SQLContext

/** Spec for [[BasicExtractor]]'s constant DataFrame emission. */
class ExtractorsSpec extends UnitSpec with LocalSparkContext {
  val emptyConfig = UserExtractConfig(class_name = "Test", value = JsString("empty"))
  val logger = new TestLogger("test")

  var ssc: StreamingContext = _
  var sqlContext: SQLContext = _

  override def beforeEach(): Unit = {
    super.beforeEach()
    ssc = new StreamingContext(sc, Seconds(1))
    sqlContext = new SQLContext(sc)
  }

  "BasicExtractor" should "emit a constant DataFrame" in {
    val extract = new BasicExtractor

    val maybeDf = extract.next(ssc, 1, sqlContext, emptyConfig, 1, logger)
    assert(maybeDf.isDefined)

    // Sums the "number" column; the extractor's constant frame is expected
    // to total 15 (presumably the values 1..5 — defined in BasicExtractor).
    val total = maybeDf.get
      .select("number")
      .rdd
      .map(r => r(0).asInstanceOf[Int])
      .sum()
    assert(total == 15)
  }
}
Example 108
Source File: GlobalMQDeserializerSpec.scala From seahorse-workflow-executor with Apache License 2.0 | 5 votes |
package io.deepsense.workflowexecutor.communication.mq.json

import java.nio.charset.StandardCharsets

import org.scalatest.mockito.MockitoSugar
import spray.json.{JsArray, JsObject, JsString}

import io.deepsense.commons.StandardSpec
import io.deepsense.models.workflows.Workflow
import io.deepsense.workflowexecutor.communication.message.global._
import io.deepsense.workflowexecutor.communication.mq.json.Global.GlobalMQDeserializer

/** Verifies that [[GlobalMQDeserializer]] decodes each global protocol message type. */
class GlobalMQDeserializerSpec extends StandardSpec with MockitoSugar {

  "GlobalMQDeserializer" should {
    "deserialize Launch messages" in {
      val workflowId = Workflow.Id.randomId
      val nodesToExecute =
        Vector(Workflow.Id.randomId, Workflow.Id.randomId, Workflow.Id.randomId)

      val rawMessage = envelope("launch", JsObject(
        "workflowId" -> JsString(workflowId.toString),
        "nodesToExecute" -> JsArray(nodesToExecute.map(id => JsString(id.toString)))))

      val readMessage: Any = serializeAndRead(rawMessage)
      readMessage shouldBe Launch(workflowId, nodesToExecute.toSet)
    }

    "deserialize Heartbeat messages" in {
      val workflowId = "foo-workflow"
      val rawMessage = envelope("heartbeat", JsObject("workflowId" -> JsString(workflowId)))
      serializeAndRead(rawMessage) shouldBe Heartbeat(workflowId)
    }

    "deserialize PoisonPill messages" in {
      serializeAndRead(envelope("poisonPill", JsObject())) shouldBe PoisonPill()
    }

    "deserialize Ready messages" in {
      val sessionId = "foo-session"
      val rawMessage = envelope("ready", JsObject("sessionId" -> JsString(sessionId)))
      serializeAndRead(rawMessage) shouldBe Ready(sessionId)
    }
  }

  // Wraps a message body in the standard {messageType, messageBody} envelope.
  private def envelope(messageType: String, body: JsObject): JsObject = JsObject(
    "messageType" -> JsString(messageType),
    "messageBody" -> body)

  // Renders the JSON to UTF-8 bytes and feeds it through the deserializer,
  // mirroring what arrives over the message queue.
  private def serializeAndRead(rawMessage: JsObject): Any = {
    val bytes = rawMessage.compactPrint.getBytes(StandardCharsets.UTF_8)
    GlobalMQDeserializer.deserializeMessage(bytes)
  }
}
Example 109
Source File: ParamsSerialization.scala From seahorse-workflow-executor with Apache License 2.0 | 5 votes |
package io.deepsense.deeplang.doperables.serialization

import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue}

import io.deepsense.deeplang.catalogs.doperable.exceptions.NoParameterlessConstructorInClassException
import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.{ExecutionContext, TypeUtils}

/**
 * Mixin adding persistence of an operable's parameter values.
 *
 * Two JSON files are written under `path`: a metadata file recording the
 * concrete class name (so the object can be re-instantiated on load) and a
 * params file holding the serialized parameter values.
 */
trait ParamsSerialization { self: Params =>

  /** Persists both the class-name metadata and the parameter values under `path`. */
  def saveObjectWithParams(ctx: ExecutionContext, path: String): Unit = {
    saveMetadata(ctx, path)
    saveParams(ctx, path)
  }

  /** Reads parameter values previously saved under `path` and applies them to this instance. */
  def loadAndSetParams(ctx: ExecutionContext, path: String): this.type =
    setParams(loadParams(ctx, path))

  // Writes a one-field JSON object with the concrete class name; consumed by
  // ParamsSerialization.load to pick the right type to instantiate.
  protected def saveMetadata(ctx: ExecutionContext, path: String) = {
    val metaPath = ParamsSerialization.metadataFilePath(path)
    val metaJson = JsObject(
      ParamsSerialization.classNameKey -> JsString(this.getClass.getName)
    )
    JsonObjectPersistence.saveJsonToFile(ctx, metaPath, metaJson)
  }

  /** Serializes the current parameter values into the params file under `path`. */
  protected def saveParams(ctx: ExecutionContext, path: String): Unit =
    JsonObjectPersistence.saveJsonToFile(
      ctx, ParamsSerialization.paramsFilePath(path), paramValuesToJson)

  /** Loads the raw parameter-values JSON saved under `path`. */
  protected def loadParams(ctx: ExecutionContext, path: String): JsValue =
    JsonObjectPersistence.loadJsonFromFile(ctx, ParamsSerialization.paramsFilePath(path))

  private def setParams(paramsJson: JsValue): this.type =
    this.set(paramPairsFromJson(paramsJson): _*)
}

object ParamsSerialization {
  val classNameKey = "className"
  val paramsFileName = "params"
  val metadataFileName = "metadata"

  /**
   * Re-creates a previously saved object: reads the class name from the
   * metadata file, instantiates the class through its parameterless
   * constructor, and delegates to the instance's own `load`.
   *
   * @throws NoParameterlessConstructorInClassException when the recorded class
   *         has no parameterless constructor.
   */
  def load(ctx: ExecutionContext, path: String): Loadable = {
    import DefaultJsonProtocol._
    val metadataJson: JsObject =
      JsonObjectPersistence.loadJsonFromFile(ctx, metadataFilePath(path)).asJsObject
    val className = metadataJson.fields(classNameKey).convertTo[String]
    val clazz: Class[_] = Class.forName(className)
    val constructor = TypeUtils.constructorForClass(clazz).getOrElse(
      throw new NoParameterlessConstructorInClassException(clazz.getCanonicalName))
    TypeUtils.createInstance(constructor).asInstanceOf[Loadable].load(ctx, path)
  }

  def metadataFilePath(path: String): String =
    PathsUtils.combinePaths(path, metadataFileName)

  def paramsFilePath(path: String): String =
    PathsUtils.combinePaths(path, paramsFileName)
}