java.sql.SQLException Scala Examples
The following examples show how to use java.sql.SQLException.
You can go to the original project or source file by following the links above each example.
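For context, java.sql.SQLException is the checked exception thrown by JDBC operations. Below is a minimal, self-contained sketch of catching it directly in Scala; the in-memory H2 URL is only a placeholder and is not taken from any of the projects below.

import java.sql.{DriverManager, SQLException}

// Placeholder JDBC URL; substitute your own driver and connection string.
val url = "jdbc:h2:mem:example"

try {
  val conn = DriverManager.getConnection(url)
  try {
    val rs = conn.createStatement().executeQuery("SELECT 1")
    while (rs.next()) println(rs.getInt(1))
  } finally conn.close()
} catch {
  case e: SQLException =>
    // SQLState and the vendor error code help distinguish error classes
    // (syntax errors, constraint violations, connectivity problems, ...).
    println(s"SQL error: state=${e.getSQLState} code=${e.getErrorCode} msg=${e.getMessage}")
}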
Example 1
Source File: QueryFunctions.scala From azure-sqldb-spark with MIT License
package com.microsoft.azure.sqldb.spark.query

import java.sql.{Connection, SQLException}
import com.microsoft.azure.sqldb.spark.connect.ConnectionUtils._
import com.microsoft.azure.sqldb.spark.LoggingTrait
import com.microsoft.azure.sqldb.spark.config.{Config, SqlDBConfig}
import com.microsoft.azure.sqldb.spark.connect._
import org.apache.spark.sql.{DataFrame, SQLContext}

def sqlDBQuery(config: Config): Either[DataFrame, Boolean] = {
  var connection: Connection = null

  val sql = config.get[String](SqlDBConfig.QueryCustom).getOrElse(
    throw new IllegalArgumentException("Query not found in QueryCustom in Config")
  )

  try {
    connection = getConnection(config)
    val statement = connection.createStatement()

    if (statement.execute(sql)) {
      Left(sqlContext.read.sqlDB(config))
    } else {
      Right(true)
    }
  } catch {
    case sqlException: SQLException => {
      sqlException.printStackTrace()
      Right(false)
    }
    case exception: Exception => {
      exception.printStackTrace()
      Right(false)
    }
  } finally {
    // Guard against a null connection if getConnection failed before assignment.
    if (connection != null) connection.close()
  }
}
}
Example 2
Source File: CommentModel.scala From mimir with Apache License 2.0
package mimir.models;

import scala.util.Random
import mimir.algebra._
import mimir.util._
import java.sql.SQLException

@SerialVersionUID(1001L)
class CommentModel(override val name: ID, cols: Seq[ID], colTypes: Seq[Type], comments: Seq[String])
  extends Model(name)
  with Serializable
  with SourcedFeedback
{
  def getFeedbackKey(idx: Int, args: Seq[PrimitiveValue]): ID = ID(s"${args(0).asString}:$idx")

  def argTypes(idx: Int) = Seq(TRowId())
  def varType(idx: Int, args: Seq[Type]) = colTypes(idx)

  def bestGuess(idx: Int, args: Seq[PrimitiveValue], hints: Seq[PrimitiveValue]) = {
    getFeedback(idx, args) match {
      case Some(v) => v
      case None => {
        hints(0)
      }
    }
  }

  def sample(idx: Int, randomness: Random, args: Seq[PrimitiveValue], hints: Seq[PrimitiveValue]) = hints(0)

  def reason(idx: Int, args: Seq[PrimitiveValue], hints: Seq[PrimitiveValue]): String = {
    //println("CommentModel:reason: " + idx + " [ " + args.mkString(",") + " ] [ " + hints.mkString(",") + " ]" );
    val rowid = RowIdPrimitive(args(0).asString)
    val rval = getFeedback(idx, args) match {
      case Some(v) => s"${getReasonWho(idx,args)} told me that $v is valid for row $rowid"
      case None => s" ${comments(idx)}"
    }
    rval
  }

  def feedback(idx: Int, args: Seq[PrimitiveValue], v: PrimitiveValue): Unit = {
    val rowid = args(0).asString
    setFeedback(idx, args, v)
  }

  def isAcknowledged(idx: Int, args: Seq[PrimitiveValue]): Boolean = hasFeedback(idx, args)

  def hintTypes(idx: Int): Seq[mimir.algebra.Type] = colTypes

  //def getDomain(idx: Int, args: Seq[PrimitiveValue], hints:Seq[PrimitiveValue]): Seq[(PrimitiveValue,Double)] = Seq((hints(0), 0.0))

  def confidence(idx: Int, args: Seq[PrimitiveValue], hints: Seq[PrimitiveValue]): Double = {
    val rowid = RowIdPrimitive(args(0).asString)
    getFeedback(idx, args) match {
      case Some(v) => { 1.0 }
      case None => { 0.0 }
    }
  }
}
Example 3
Source File: Sampler.scala From mimir with Apache License 2.0
package mimir.ctables.vgterm

import mimir.algebra._
import scala.util._
import mimir.models._
import java.sql.SQLException

case class Sampler(
  model: Model,
  idx: Int,
  vgArgs: Seq[Expression],
  vgHints: Seq[Expression],
  seed: Expression
) extends Proc((seed :: (vgArgs.toList ++ vgHints.toList))) {

  def getType(argTypes: Seq[Type]): Type = model.varType(idx, argTypes)

  def get(v: Seq[PrimitiveValue]): PrimitiveValue = {
    if (v.size < 1) {
      throw new SQLException("Internal error. Expecting seed.")
    }
    val seed = v.head
    val (argValues, hintValues) = v.tail.splitAt(vgArgs.length)
    model.sample(idx, seed.asLong, argValues, hintValues)
  }

  def rebuild(v: Seq[Expression]) = {
    if (v.size < 1) {
      throw new SQLException("Internal error. Expecting seed.")
    }
    var (a, h) = v.tail.splitAt(vgArgs.length)
    Sampler(model, idx, a, h, v.head)
  }
}
Example 4
Source File: IsAcknowledged.scala From mimir with Apache License 2.0
package mimir.ctables.vgterm

import mimir.algebra._
import scala.util._
import mimir.models._
import java.sql.SQLException

case class IsAcknowledged(
  model: Model,
  idx: Int,
  vgArgs: Seq[Expression]
) extends Proc(vgArgs) {

  def getType(argTypes: Seq[Type]): Type = TBool()

  def get(v: Seq[PrimitiveValue]): PrimitiveValue = {
    BoolPrimitive(model.isAcknowledged(idx, v))
  }

  def rebuild(v: Seq[Expression]) = IsAcknowledged(model, idx, v)

  override def toString =
    s"VGTERM_ACKNOWLEDGED(${(Seq(model.name, idx.toString) ++ vgArgs.map(_.toString)).mkString(", ")})"
}
Example 5
Source File: SQLParsers.scala From mimir with Apache License 2.0
package mimir.test

import java.io._
import java.sql.SQLException
import mimir.parser._
import sparsity.statement.Statement
import fastparse.Parsed

trait SQLParsers {

  def stmts(f: String): Seq[MimirStatement] = stmts(new File(f))

  def stmts(f: File): Seq[MimirStatement] = {
    val p = MimirCommand(new FileReader(f))
    p.map {
      case Parsed.Success(SQLCommand(cmd), _) => cmd
      case Parsed.Success(cmd: SlashCommand, _) =>
        throw new SQLException(s"Expecting SQL, not command: $cmd")
      case fail: Parsed.Failure =>
        throw new SQLException(fail.longMsg)
    }.toSeq
  }

  def stmt(s: String) = MimirSQL.Get(s)

  def selectStmt(s: String) = MimirSQL.Select(s)

  def sqlExpr(s: String) = sparsity.parser.Expression(s)
}
Example 6
Source File: DetectSeriesSpec.scala From mimir with Apache License 2.0
package mimir.statistics

import java.io.{BufferedReader, File, FileReader, StringReader}
import java.sql.SQLException
import scala.collection.JavaConversions._
import org.specs2.mutable._
import org.specs2.matcher.FileMatchers
import mimir._
import mimir.sql._
import mimir.parser._
import mimir.algebra._
import mimir.optimizer._
import mimir.ctables._
import mimir.exec._
import mimir.util._
import mimir.test._
import mimir.statistics._

object DetectSeriesSpec extends SQLTestSpecification("DetectSeriesTest") {
  sequential

  def testDetectSeriesof(oper: Operator) = {
    val (schema, df) = SparkUtils.getDataFrameWithProvFromQuery(db, oper)
    DetectSeries.seriesOf(df, schema, 0.1).collect().toSeq
  }

  "The DetectSeriesSpec" should {

    "Be able to load DetectSeriesTest1" >> {
      db.loader.loadTable("test/data/DetectSeriesTest1.csv"); ok
    }

    "Be able to detect Date and Timestamp type" >> {
      val queryOper = select("SELECT * FROM DetectSeriesTest1")
      val colSeq: Seq[String] = testDetectSeriesof(queryOper).map{_.columnName.toString}

      colSeq must have size(4)
      colSeq must contain("TRAN_TS","EXP_DT", "JOIN_DT", "DOB")
    }

    "Be able to create a new schema and detect Date and Timestamp type" >> {
      db.catalog.materializedTableProvider().createStoredTableAs(
        HardTable(Seq(
          ID("JN_DT") -> TDate(),
          ID("JN_TS") -> TTimestamp()
        ), Seq()),
        ID("DetectSeriesTest3"),
        db
      )

      val queryOper = select("SELECT * FROM DetectSeriesTest3")
      val colSeq: Seq[String] = testDetectSeriesof(queryOper).map{_.columnName.toString}

      colSeq must have size(2)
      colSeq must contain("JN_DT", "JN_TS")
    }

    "Be able to load DetectSeriesTest2" >> {
      db.loader.loadTable("test/data/DetectSeriesTest2.csv"); ok
    }

    "Be able to detect Date, Timestamp and increasing-decreasing Numeric type" >> {
      val queryOper = select("SELECT * FROM DetectSeriesTest2")
      val colSeq: Seq[String] = testDetectSeriesof(queryOper).map{_.columnName.toString}

      colSeq must have size(6)
      colSeq must contain("TRAN_TS","EXP_DT", "JOIN_DT", "DOB", "ROW_ID", "QUALITY")
    }
  }
}
Example 7
package tamer
package db

import java.sql.SQLException
import java.time.Instant

import cats.effect.Blocker
import doobie.hikari.HikariTransactor
import doobie.implicits._
import doobie.util.transactor.Transactor
import eu.timepit.refined.auto._
import fs2.{Chunk, Stream}
import log.effect.LogWriter
import log.effect.zio.ZioLogWriter.log4sFromName
import tamer.config.{DbConfig, QueryConfig}
import zio._
import zio.interop.catz._

import scala.concurrent.ExecutionContext

trait Db extends Serializable {
  val db: Db.Service[Any]
}

object Db {
  implicit class InstantOps(ours: Instant) {
    def -(theirs: Instant): Long = ours.toEpochMilli - theirs.toEpochMilli
  }

  case class ChunkWithMetadata[V](chunk: Chunk[V], pulledAt: Instant = Instant.now())
  case class ValueWithMetadata[V](value: V, pulledAt: Instant = Instant.now())

  trait Service[R] {
    def runQuery[K, V, State](
        tnx: Transactor[Task],
        setup: Setup[K, V, State],
        queryConfig: QueryConfig
    )(state: State, q: Queue[(K, V)]): ZIO[R, DbError, State]
  }

  object > extends Service[Db] {
    override final def runQuery[K, V, State](
        tnx: Transactor[Task],
        setup: Setup[K, V, State],
        queryConfig: QueryConfig
    )(state: State, q: Queue[(K, V)]): ZIO[Db, DbError, State] =
      ZIO.accessM(_.db.runQuery(tnx, setup, queryConfig)(state, q))
  }

  trait Live extends Db {
    override final val db: Service[Any] = new Service[Any] {
      private[this] val logTask: Task[LogWriter[Task]] = log4sFromName.provide("tamer.Db.Live")

      override final def runQuery[K, V, State](
          tnx: Transactor[Task],
          setup: Setup[K, V, State],
          queryConfig: QueryConfig
      )(state: State, q: Queue[(K, V)]): IO[DbError, State] =
        (for {
          log   <- logTask
          query <- UIO(setup.buildQuery(state))
          _     <- log.debug(s"running ${query.sql} with params derived from $state").ignore
          start <- UIO(Instant.now())
          values <- query
            .streamWithChunkSize(queryConfig.fetchChunkSize)
            .chunks
            .transact(tnx)
            .map(c => ChunkWithMetadata(c))
            .evalTap(c => q.offerAll(c.chunk.iterator.to(LazyList).map(v => setup.valueToKey(v) -> v)))
            .flatMap(c => Stream.chunk(c.chunk).map(v => ValueWithMetadata(v, c.pulledAt)))
            .compile
            .toList
          newState <- setup.stateFoldM(state)(
            QueryResult(
              ResultMetadata(values.headOption.fold(Instant.now())(_.pulledAt) - start),
              values.map(_.value)
            )
          )
        } yield newState).mapError { case e: Exception => DbError(e.getLocalizedMessage) }
    }
  }

  def mkTransactor(db: DbConfig, connectEC: ExecutionContext, transactEC: ExecutionContext): Managed[DbError, HikariTransactor[Task]] =
    Managed {
      HikariTransactor
        .newHikariTransactor[Task](db.driver, db.uri, db.username, db.password, connectEC, Blocker.liftExecutionContext(transactEC))
        .allocated
        .map { case (ht, cleanup) => Reservation(ZIO.succeed(ht), _ => cleanup.orDie) }
        .uninterruptible
        .refineToOrDie[SQLException]
        .mapError(sqle => DbError(sqle.getLocalizedMessage()))
    }
}
Example 8
Source File: MySQLDialect.scala From ez-framework with Apache License 2.0
package com.ecfront.ez.framework.service.jdbc.dialect

import java.sql.SQLException

import com.ecfront.ez.framework.service.jdbc.dialect.DialectType.DialectType

object MySQLDialect extends Dialect {

  def paging(sql: String, pageNumber: Long, pageSize: Long): String = {
    sql + " LIMIT " + (pageNumber - 1) * pageSize + ", " + pageSize
  }

  def count(sql: String): String = {
    "SELECT count(1) FROM ( " + sql + " ) _ctmp"
  }

  def getTableInfo(tableName: String): String = ???

  override def createTableIfNotExist(tableName: String, tableDesc: String, fields: List[FiledInfo],
                                     indexFields: List[String], uniqueFields: List[String], pkField: String): String = {
    val ddl = new StringBuilder(s"CREATE TABLE IF NOT EXISTS `$tableName` ( ")
    fields.reverse.foreach {
      field =>
        val columnName = field.name.toLowerCase
        val desc = s"COMMENT '${field.desc}'"
        val len = field.len
        val scale = field.scale
        val columnExt = field.dType.toLowerCase match {
          case "seq" => s"INT NOT NULL AUTO_INCREMENT"
          case t if t == "int" || t == "integer" =>
            s"INT${if (len == 0) "" else "(" + len + ")"} NOT NULL ${if (pkField != null && pkField.toLowerCase == columnName) "AUTO_INCREMENT" else """DEFAULT "0""""}"
          case "long" => s"""BIGINT${if (len == 0) "" else "(" + len + ")"} NOT NULL DEFAULT "0""""
          case "short" => s"""SMALLINT${if (len == 0) "" else "(" + len + ")"} NOT NULL DEFAULT "0""""
          case "string" =>
            if (len == 0) {
              s"""TEXT NOT NULL"""
            } else {
              s"""VARCHAR${if (len == 0) "" else "(" + len + ")"} NOT NULL DEFAULT """""
            }
          case t if t == "bool" || t == "boolean" => s"""BOOLEAN NOT NULL DEFAULT "0""""
          case "float" => s"""FLOAT${if (len == 0) "" else "(" + len + "," + scale + ")"} NOT NULL DEFAULT "0""""
          case "double" => s"""DOUBLE${if (len == 0) "" else "(" + len + "," + scale + ")"} NOT NULL DEFAULT "0""""
          case "bigdecimal" => s"""DECIMAL${if (len == 0) "" else "(" + len + "," + scale + ")"} NOT NULL DEFAULT "0""""
          case "java.util.date" => s"""DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP"""
          case t => throw new SQLException(s"Not support data type: $t at $tableName")
        }
        ddl.append("\r\n `" + columnName + "` " + columnExt + " " + desc + ",")
    }
    if (pkField != null && pkField.trim.nonEmpty) {
      ddl.append(s"\r\n PRIMARY KEY (`${pkField.toLowerCase}`),")
    }
    if (uniqueFields != null && uniqueFields.nonEmpty) {
      uniqueFields.foreach {
        uniqueField =>
          ddl.append(s"\r\n UNIQUE KEY `uni_${uniqueField.toLowerCase}` (`${uniqueField.toLowerCase}`),")
      }
    }
    if (indexFields != null && indexFields.nonEmpty) {
      indexFields.foreach {
        indexField =>
          ddl.append(s"\r\n KEY `idx_${indexField.toLowerCase}` (`${indexField.toLowerCase}`),")
      }
    }
    ddl.substring(0, ddl.length - 1) + s"\r\n) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='$tableDesc'"
  }

  override def changeTableName(oriTableName: String, newTableName: String): String = {
    s"RENAME TABLE $oriTableName TO $newTableName"
  }

  def getDriver: String = "com.mysql.jdbc.Driver"

  def getDialectType: DialectType = DialectType.MYSQL
}
Example 9
Source File: ConnectionUtils.scala From azure-sqldb-spark with MIT License
package com.microsoft.azure.sqldb.spark.connect

import java.sql.{Connection, DriverManager, SQLException}
import java.util.Properties

import com.microsoft.azure.sqldb.spark.config.{Config, SqlDBConfig}

/**
 * Helper and utility methods used for setting up or using a connection
 */
private[spark] object ConnectionUtils {

  /**
   * Retrieves all connection properties in the Config object
   * and returns them as a [[Properties]] object.
   *
   * @param config the Config object with specified connection properties.
   * @return A connection [[Properties]] object.
   */
  def createConnectionProperties(config: Config): Properties = {
    val connectionProperties = new Properties()
    for (key <- config.getAllKeys) {
      connectionProperties.put(key.toString, config.get[String](key.toString).get)
    }
    connectionProperties
  }

  /**
   * Adds the "jdbc:sqlserver://" prefix to a general server url
   *
   * @param url the string url without the JDBC prefix
   * @return the url with the added JDBC prefix
   */
  def createJDBCUrl(url: String): String = SqlDBConfig.JDBCUrlPrefix + url

  /**
   * Gets a JDBC connection based on Config properties
   *
   * @param config any read or write Config
   * @return a JDBC Connection
   */
  def getConnection(config: Config): Connection = {
    Class.forName(SqlDBConfig.SQLjdbcDriver)
    DriverManager.getConnection(
      createJDBCUrl(config.get[String](SqlDBConfig.URL).get), createConnectionProperties(config))
  }

  /**
   * Retrieves the DBTable or QueryCustom specified in the config.
   * NOTE: only one property can exist within config.
   *
   * @param config the Config object with specified properties.
   * @return The specified DBTable or QueryCustom
   */
  def getTableOrQuery(config: Config): String = {
    config.get[String](SqlDBConfig.DBTable).getOrElse(
      getQueryCustom(config.get[String](SqlDBConfig.QueryCustom).get)
    )
  }

  /**
   * The JDBC driver requires parentheses and a temp variable around any custom queries.
   * This adds the required syntax so users only need to specify the query.
   *
   * @param query the default query
   * @return the syntactically correct query to be executed by the JDBC driver.
   */
  def getQueryCustom(query: String): String = s"($query) QueryCustom"
}
Example 10
Source File: FunctionRegistry.scala From mimir with Apache License 2.0
package mimir.algebra.function;

import java.sql.SQLException

import mimir.parser.ExpressionParser
import mimir.algebra._
import mimir.Database

sealed abstract class RegisteredFunction { val name: ID }

case class NativeFunction(
  name: ID,
  evaluator: Seq[PrimitiveValue] => PrimitiveValue,
  typechecker: Seq[Type] => Type,
  passthrough: Boolean = false
) extends RegisteredFunction

case class ExpressionFunction(name: ID, args: Seq[ID], expr: Expression)
  extends RegisteredFunction

case class FoldFunction(name: ID, expr: Expression)
  extends RegisteredFunction

class FunctionRegistry {

  var functionPrototypes: scala.collection.mutable.Map[ID, RegisteredFunction] =
    scala.collection.mutable.Map.empty;

  {
    GeoFunctions.register(this)
    JsonFunctions.register(this)
    NumericFunctions.register(this)
    SampleFunctions.register(this)
    StringFunctions.register(this)
    TypeFunctions.register(this)
    UtilityFunctions.register(this)
    RandomnessFunctions.register(this)
    TimeFunctions.register(this)
    // it's too early to do this here. Spark is not open yet.
    // SparkFunctions.register(this)
  }

  def register(
    fname: ID,
    eval: Seq[PrimitiveValue] => PrimitiveValue,
    typechecker: Seq[Type] => Type
  ): Unit =
    register(new NativeFunction(fname, eval, typechecker))

  def registerPassthrough(
    fname: ID,
    eval: Seq[PrimitiveValue] => PrimitiveValue,
    typechecker: Seq[Type] => Type
  ): Unit =
    register(new NativeFunction(fname, eval, typechecker, true))

  def registerExpr(fname: ID, args: Seq[ID], expr: String): Unit =
    registerExpr(fname, args, ExpressionParser.expr(expr))

  def registerExpr(fname: ID, args: Seq[ID], expr: Expression): Unit =
    register(new ExpressionFunction(fname, args, expr))

  def registerFold(fname: ID, expr: String): Unit =
    registerFold(fname, ExpressionParser.expr(expr))

  def registerFold(fname: ID, expr: Expression): Unit =
    register(new FoldFunction(fname, expr))

  def register(fn: RegisteredFunction) =
    functionPrototypes.put(fn.name, fn)

  def get(fname: ID): RegisteredFunction = {
    functionPrototypes.get(fname) match {
      case Some(func) => func
      case None => throw new RAException(s"Unknown function '$fname'")
    }
  }

  def getOption(fname: ID): Option[RegisteredFunction] =
    functionPrototypes.get(fname)

  def unfold(fname: ID, args: Seq[Expression]): Option[Expression] =
    get(fname) match {
      case _: NativeFunction => None
      case ExpressionFunction(_, argNames, expr) =>
        Some(Eval.inline(expr, argNames.zip(args).toMap))
      case FoldFunction(_, expr) =>
        Some(
          args.tail.foldLeft[Expression](args.head) {
            case (curr, next) =>
              Eval.inline(expr, Map(ID("CURR") -> curr, ID("NEXT") -> next))
          }
        )
    }
}
Example 11
Source File: ParamBinder.scala From spark-sql-server with Apache License 2.0
package org.apache.spark.sql.server.service

import java.sql.SQLException

import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.server.catalyst.expressions.ParameterPlaceHolder

object ParamBinder {

  def bind(logicalPlan: LogicalPlan, params: Map[Int, Literal]): LogicalPlan = {
    val boundPlan = logicalPlan.transformAllExpressions {
      case ParameterPlaceHolder(id) if params.contains(id) =>
        params(id)
    }
    val unresolvedParams = boundPlan.flatMap { plan =>
      plan.expressions.flatMap { _.flatMap {
        case ParameterPlaceHolder(id) => Some(id)
        case _ => None
      }}
    }
    if (unresolvedParams.nonEmpty) {
      throw new SQLException("Unresolved parameters found: " +
        unresolvedParams.map(n => s"$$$n").mkString(", "))
    }
    boundPlan
  }
}
Example 12
Source File: ParameterBinderSuite.scala From spark-sql-server with Apache License 2.0
package org.apache.spark.sql.server.service.postgresql.protocol.v3

import java.sql.SQLException

import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions.{And, EqualTo, Literal}
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.server.catalyst.expressions.ParameterPlaceHolder
import org.apache.spark.sql.server.service.ParamBinder
import org.apache.spark.sql.types._

class ParameterBinderSuite extends PlanTest {

  test("bind parameters") {
    val c0 = 'a.int
    val c1 = 'b.int
    val r1 = LocalRelation(c0, c1)

    val param1 = Literal(18, IntegerType)
    val lp1 = Filter(EqualTo(c0, ParameterPlaceHolder(1)), r1)
    val expected1 = Filter(EqualTo(c0, param1), r1)
    comparePlans(expected1, ParamBinder.bind(lp1, Map(1 -> param1)))

    val param2 = Literal(42, IntegerType)
    val lp2 = Filter(EqualTo(c0, ParameterPlaceHolder(300)), r1)
    val expected2 = Filter(EqualTo(c0, param2), r1)
    comparePlans(expected2, ParamBinder.bind(lp2, Map(300 -> param2)))

    val param3 = Literal(-1, IntegerType)
    val param4 = Literal(48, IntegerType)
    val lp3 = Filter(
      And(
        EqualTo(c0, ParameterPlaceHolder(1)),
        EqualTo(c1, ParameterPlaceHolder(2))
      ),
      r1)
    val expected3 = Filter(
      And(
        EqualTo(c0, param3),
        EqualTo(c1, param4)
      ),
      r1)
    comparePlans(expected3, ParamBinder.bind(lp3, Map(1 -> param3, 2 -> param4)))

    val errMsg1 = intercept[SQLException] {
      ParamBinder.bind(lp1, Map.empty)
    }.getMessage
    assert(errMsg1 == "Unresolved parameters found: $1")
    val errMsg2 = intercept[SQLException] {
      ParamBinder.bind(lp2, Map.empty)
    }.getMessage
    assert(errMsg2 == "Unresolved parameters found: $300")
    val errMsg3 = intercept[SQLException] {
      ParamBinder.bind(lp3, Map.empty)
    }.getMessage
    assert(errMsg3 == "Unresolved parameters found: $1, $2")
  }
}
Example 13
Source File: PgWireProtocolSuite.scala From spark-sql-server with Apache License 2.0
package org.apache.spark.sql.server.service.postgresql.protocol.v3

import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import java.sql.SQLException

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.StructType
import org.apache.spark.unsafe.types.UTF8String

class PgWireProtocolSuite extends SparkFunSuite {

  val conf = new SQLConf()

  test("DataRow") {
    val v3Protocol = new PgWireProtocol(65536)
    val row = new GenericInternalRow(2)
    row.update(0, 8)
    row.update(1, UTF8String.fromString("abcdefghij"))
    val schema = StructType.fromDDL("a INT, b STRING")
    val rowConverters = PgRowConverters(conf, schema, Seq(true, false))
    val data = v3Protocol.DataRow(row, rowConverters)
    val bytes = ByteBuffer.wrap(data)
    assert(bytes.get() === 'D'.toByte)
    assert(bytes.getInt === 28)
    assert(bytes.getShort === 2)
    assert(bytes.getInt === 4)
    assert(bytes.getInt === 8)
    assert(bytes.getInt === 10)
    assert(data.slice(19, 30) === "abcdefghij".getBytes(StandardCharsets.UTF_8))
  }

  test("Fails when message buffer overflowed") {
    val v3Protocol = new PgWireProtocol(4)
    val row = new GenericInternalRow(1)
    row.update(0, UTF8String.fromString("abcdefghijk"))
    val schema = StructType.fromDDL("a STRING")
    val rowConverters = PgRowConverters(conf, schema, Seq(false))
    val errMsg = intercept[SQLException] {
      v3Protocol.DataRow(row, rowConverters)
    }.getMessage
    assert(errMsg.contains(
      "Cannot generate a V3 protocol message because buffer is not enough for the message. " +
        "To avoid this exception, you might set higher value at " +
        "'spark.sql.server.messageBufferSizeInBytes'")
    )
  }
}
Example 14
Source File: MysqlSink.scala From flink-rookie with Apache License 2.0
package com.venn.stream.api.jdbcOutput

import java.sql.{Connection, DriverManager, PreparedStatement, SQLException}
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.slf4j.{Logger, LoggerFactory}

class MysqlSink extends RichSinkFunction[User] {

  val logger: Logger = LoggerFactory.getLogger("MysqlSink")
  var conn: Connection = _
  var ps: PreparedStatement = _
  val jdbcUrl = "jdbc:mysql://192.168.229.128:3306?useSSL=false&allowPublicKeyRetrieval=true"
  val username = "root"
  val password = "123456"
  val driverName = "com.mysql.jdbc.Driver"

  override def open(parameters: Configuration): Unit = {
    Class.forName(driverName)
    try {
      Class.forName(driverName)
      conn = DriverManager.getConnection(jdbcUrl, username, password)
      // close auto commit
      conn.setAutoCommit(false)
    } catch {
      case e@(_: ClassNotFoundException | _: SQLException) =>
        logger.error("init mysql error")
        e.printStackTrace()
        System.exit(-1);
    }
  }

  override def invoke(user: User, context: SinkFunction.Context[_]): Unit = {
    println("get user : " + user.toString)
    ps = conn.prepareStatement("insert into async.user(username, password, sex, phone) values(?,?,?,?)")
    ps.setString(1, user.username)
    ps.setString(2, user.password)
    ps.setInt(3, user.sex)
    ps.setString(4, user.phone)
    ps.execute()
    conn.commit()
  }

  override def close(): Unit = {
    if (conn != null) {
      conn.commit()
      conn.close()
    }
  }
}
Example 15
Source File: MysqlSink1.scala From flink-rookie with Apache License 2.0
package com.venn.stream.api.jdbcOutput

import java.sql.{Connection, DriverManager, PreparedStatement, SQLException}
import org.apache.flink.api.common.io.OutputFormat
import org.apache.flink.configuration.Configuration
import org.slf4j.{Logger, LoggerFactory}

class MysqlSink1 extends OutputFormat[User] {

  val logger: Logger = LoggerFactory.getLogger("MysqlSink1")
  var conn: Connection = _
  var ps: PreparedStatement = _
  val jdbcUrl = "jdbc:mysql://192.168.229.128:3306?useSSL=false&allowPublicKeyRetrieval=true"
  val username = "root"
  val password = "123456"
  val driverName = "com.mysql.jdbc.Driver"

  override def configure(parameters: Configuration): Unit = {
    // not need
  }

  override def open(taskNumber: Int, numTasks: Int): Unit = {
    Class.forName(driverName)
    try {
      Class.forName(driverName)
      conn = DriverManager.getConnection(jdbcUrl, username, password)
      // close auto commit
      conn.setAutoCommit(false)
    } catch {
      case e@(_: ClassNotFoundException | _: SQLException) =>
        logger.error("init mysql error")
        e.printStackTrace()
        System.exit(-1);
    }
  }

  override def writeRecord(user: User): Unit = {
    println("get user : " + user.toString)
    ps = conn.prepareStatement("insert into async.user(username, password, sex, phone) values(?,?,?,?)")
    ps.setString(1, user.username)
    ps.setString(2, user.password)
    ps.setInt(3, user.sex)
    ps.setString(4, user.phone)
    ps.execute()
    conn.commit()
  }

  override def close(): Unit = {
    if (conn != null) {
      conn.commit()
      conn.close()
    }
  }
}
Example 16
Source File: CustomerTimerDemo.scala From flink-rookie with Apache License 2.0
package com.venn.stream.api.timer

import java.io.File
import java.sql.{Connection, DriverManager, PreparedStatement, SQLException}
import java.util
import java.util.{Timer, TimerTask}
import org.apache.flink.api.scala._
import com.venn.common.Common
import com.venn.util.TwoStringSource
import org.apache.flink.api.common.functions.RichMapFunction
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.configuration.Configuration
import org.apache.flink.runtime.state.filesystem.FsStateBackend
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.{CheckpointingMode, TimeCharacteristic}
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer
import org.slf4j.LoggerFactory

      def query() = {
        logger.info("query mysql")
        try {
          Class.forName(driverName)
          conn = DriverManager.getConnection(jdbcUrl, username, password)
          ps = conn.prepareStatement("select id,name from venn.timer")
          val rs = ps.executeQuery
          while (!rs.isClosed && rs.next) {
            val id = rs.getString(1)
            val name = rs.getString(2)
            map.put(id, name)
          }
          logger.info("get config from db size : {}", map.size())
        } catch {
          case e@(_: ClassNotFoundException | _: SQLException) =>
            e.printStackTrace()
        } finally {
          if (conn != null) {
            conn.close()
          }
        }
      }
    })

    // .print()
    val sink = new FlinkKafkaProducer[String]("timer_out"
      , new SimpleStringSchema()
      , Common.getProp)
    stream.addSink(sink)
    env.execute(this.getClass.getName)
  }
}
Example 17
Source File: ZioSlickSupport.scala From full-scala-stack with Apache License 2.0
package zioslick

import java.sql.SQLException

import slick.SlickException
import slick.dbio.DBIO
import zio.ZIO

trait ZioSlickSupport {
  def fromDBIO[R](dbio: DBIO[R]): SlickZIO[R] =
    for {
      db <- ZIO.accessM[DatabaseProvider](_.databaseProvider.db)
      r <- ZIO
        .fromFuture(_ => db.run(dbio))
        .mapError {
          case e: SlickException => RepositoryException("Slick Repository Error", Some(e))
          case e: SQLException   => RepositoryException("SQL Repository Error", Some(e))
        }
    } yield r
}
Example 18
Source File: ExpressionParser.scala From mimir with Apache License 2.0
package mimir.parser

import java.sql.SQLException

import fastparse._
import sparsity.parser.{Expression => SparsityExpr}
import mimir.algebra._
import mimir.sql.SqlToRA

object ExpressionParser {

  def apply = expr _

  def expr(input: String): Expression =
    parse(input, SparsityExpr.expression(_)) match {
      case Parsed.Success(expr, _) => SqlToRA(expr, SqlToRA.literalBindings(_))
      case Parsed.Failure(msg, idx, extra) =>
        throw new SQLException(s"Invalid expression (failure @ $idx: ${extra.trace().longMsg}) $input")
    }

  def list(input: String): Seq[Expression] =
    parse(input, SparsityExpr.expressionList(_)) match {
      case Parsed.Success(exprList, _) =>
        exprList.map { SqlToRA(_, SqlToRA.literalBindings(_)) }
      case Parsed.Failure(msg, idx, extra) =>
        throw new SQLException(s"Invalid expression list (failure @ $idx: ${extra.trace().longMsg}) $input")
    }

  def prim(input: String): PrimitiveValue =
    parse(input, SparsityExpr.primitive(_)) match {
      case Parsed.Success(prim, _) => SqlToRA(prim)
      case Parsed.Failure(msg, idx, extra) =>
        throw new SQLException(s"Invalid primitive (failure @ $idx: ${extra.trace().longMsg}) $input")
    }

  def function(input: String): Function =
    parse(input, SparsityExpr.function(_)) match {
      case Parsed.Success(sparsity.expression.Function(name, args, _), _) =>
        Function(ID.lower(name), args.toSeq.flatten.map { SqlToRA(_, SqlToRA.literalBindings(_)) })
      case Parsed.Failure(msg, idx, extra) =>
        throw new SQLException(s"Invalid function (failure @ $idx: ${extra.trace().longMsg}) $input")
    }
}
Example 19
Source File: DataSourceConnectionProviderSpec.scala From hydra with Apache License 2.0
package hydra.sql

import java.sql.SQLException
import java.util.Properties

import com.typesafe.config.ConfigFactory
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import org.scalatest.BeforeAndAfterAll
import org.scalatest.flatspec.AnyFlatSpecLike
import org.scalatest.matchers.should.Matchers

import scala.collection.JavaConverters._

class DataSourceConnectionProviderSpec
    extends Matchers
    with AnyFlatSpecLike
    with BeforeAndAfterAll {

  val properties = new Properties
  val cfg = ConfigFactory.load().getConfig("db-cfg")
  cfg
    .entrySet()
    .asScala
    .foreach(e => properties.setProperty(e.getKey(), cfg.getString(e.getKey())))

  private val hikariConfig = new HikariConfig(properties)
  private val ds = new HikariDataSource(hikariConfig)

  override def afterAll() = ds.close()

  "The DataSourceConnectionProvider" should "establish a connection" in {
    val p = new DataSourceConnectionProvider(ds)
    p.getConnection().isValid(1) shouldBe true
  }

  it should "close the connection" in {
    val p = new DataSourceConnectionProvider(ds)
    p.close()
    intercept[SQLException](p.getConnection())
  }

  "The DriverManagerConnectionProvider" should "be configured properly" in {
    val config = ConfigFactory.parseString("""
        |connection.url = url
        |connection.user = test
        |connection.password = password
        |connection.max.retries = 20
        |connection.retry.backoff = 10s
      """.stripMargin)
    val c = DriverManagerConnectionProvider(config)
    c.password shouldBe "password"
    c.connectionUrl shouldBe "url"
    c.username shouldBe "test"
    c.retryBackoff.toSeconds shouldBe 10
    c.maxConnectionAttempts shouldBe 20
    c.close()
  }

  it should "return a new connection" in {
    val config = ConfigFactory.parseString(
      """
        |connection.url = "jdbc:h2:mem:test;DB_CLOSE_DELAY=-1"
        |connection.max.retries = 2
        |connection.retry.backoff = 1s
      """.stripMargin
    )
    val c = DriverManagerConnectionProvider(config)
    c.getConnection() should not be null
    c.getNewConnection() should not be null
    c.close()
    c.connection.isValid(2) shouldBe false
  }
}
Example 20
Source File: Repository.scala From Learn-Scala-Programming with MIT License
package ch14

import java.sql.SQLException

import cats.effect.IO
import ch14.Model.Inventory
import fs2.Stream
import doobie._
import doobie.implicits._
import doobie.util.transactor.Transactor
import cats.implicits._

class Repository(transactor: Transactor[IO]) {

  def deleteArticle(name: String): IO[Boolean] = {
    sql"DELETE FROM article WHERE name = $name".update.run
      .transact(transactor)
      .map { affectedRows => affectedRows == 1 }
  }

  def createArticle(name: String): IO[Boolean] = {
    sql"INSERT INTO article (name, count) VALUES ($name, 0)".update.run.attempt
      .transact(transactor)
      .map {
        case Right(affectedRows) => affectedRows == 1
        case Left(_) => false
      }
  }

  def updateStock(inventory: Inventory): Stream[IO, Either[Throwable, Unit]] = {
    val updates = inventory
      .map {
        case (name, count) =>
          sql"UPDATE article set count = count + $count where name = $name".update.run
      }
      .reduce(_ *> _)
    Stream
      .eval(FC.setAutoCommit(false) *> updates *> FC.setAutoCommit(true))
      .attempt
      .transact(transactor)
  }

  def getInventory: Stream[IO, Inventory] = queryToInventory(inventoryQuery)

  def getArticle(name: String): Stream[IO, Inventory] =
    queryToInventory(sql"SELECT name, count FROM article where name = $name")

  private val inventoryQuery: Fragment = sql"SELECT name, count FROM article"

  private def queryToInventory(query: Fragment) =
    query
      .query[(String, Int)]
      .stream
      .transact(transactor)
      .fold(Map.empty[String, Int])(_ + _)
}
Example 21
Source File: ExasolDockerContainerSuite.scala From spark-exasol-connector with Apache License 2.0
package com.exasol.spark

import java.sql.DriverManager
import java.sql.SQLException

import org.scalatest.funsuite.AnyFunSuite

class ExasolDockerContainerSuite extends AnyFunSuite with BaseDockerSuite {

  test("exasol/docker-db container should be started") {
    Class.forName(container.driverClassName) // scalastyle:ignore classForName

    val connectionStr =
      s"${container.jdbcUrl};user=${container.username};password=${container.password}"
    val connection = DriverManager.getConnection(connectionStr)
    val prepareStatement = connection.prepareStatement(container.testQueryString)

    try {
      val resultSet = prepareStatement.executeQuery()
      while (resultSet.next()) {
        assert(resultSet.getInt(1) == 1)
      }
      resultSet.close()
    } catch {
      case ex: SQLException => ex.printStackTrace()
    } finally {
      prepareStatement.close()
    }
    connection.close()
  }
}
Example 22
Source File: AddressDAOTest.scala From bitcoin-s with MIT License
package org.bitcoins.wallet.models

import java.sql.SQLException

import org.bitcoins.testkit.fixtures.WalletDAOFixture
import org.bitcoins.testkit.wallet.{BitcoinSWalletTest, WalletTestUtil}

class AddressDAOTest extends BitcoinSWalletTest with WalletDAOFixture {

  behavior of "AddressDAO"

  it should "fail to insert and read an address into the database without a corresponding account" in {
    daos =>
      val addressDAO = daos.addressDAO
      val readF = {
        val addressDb = WalletTestUtil.getAddressDb(WalletTestUtil.firstAccountDb)
        addressDAO.create(addressDb)
      }
      recoverToSucceededIf[SQLException](readF)
  }

  it should "insert and read an address into the database with a corresponding account" in {
    daos =>
      val accountDAO = daos.accountDAO
      val addressDAO = daos.addressDAO
      for {
        createdAccount <- {
          val account = WalletTestUtil.firstAccountDb
          accountDAO.create(account)
        }
        createdAddress <- {
          val addressDb = WalletTestUtil.getAddressDb(createdAccount)
          addressDAO.create(addressDb)
        }
        readAddress <- addressDAO.read(createdAddress.address)
      } yield assert(readAddress.contains(createdAddress))
  }
}
Example 23
Source File: SQLHelper.scala From memsql-spark-connector with Apache License 2.0
package com.memsql.spark

import java.sql.SQLException

import scala.annotation.tailrec

object SQLHelper {
  @tailrec
  def isSQLExceptionWithCode(e: Throwable, codes: List[Integer]): Boolean = e match {
    case e: SQLException if codes.contains(e.getErrorCode) => true
    case e if e.getCause != null => isSQLExceptionWithCode(e.getCause, codes)
    case e =>
      e.printStackTrace()
      false
  }
}
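The helper above walks the cause chain until it finds an SQLException whose vendor error code is in the given list. A minimal sketch of how it might be called follows; the specific error codes (1205, 1213, typical MySQL lock-timeout/deadlock codes) and the retry predicate are assumptions for illustration, not taken from the original project.

import java.sql.SQLException
import com.memsql.spark.SQLHelper

// Hypothetical retry predicate: treat lock-wait-timeout / deadlock style errors as retryable.
val retryableCodes: List[Integer] = List[Integer](1205, 1213)

def isRetryable(t: Throwable): Boolean =
  SQLHelper.isSQLExceptionWithCode(t, retryableCodes)

// A wrapped SQLException is still detected, because the helper recurses into getCause.
val wrapped = new RuntimeException(new SQLException("Deadlock found", "40001", 1213))
assert(isRetryable(wrapped))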
Example 24
Source File: KsqlConnectionSpec.scala From ksql-jdbc-driver with Apache License 2.0
package com.github.mmolimar.ksql.jdbc

import java.sql.{Connection, SQLException, SQLFeatureNotSupportedException}
import java.util.{Collections, Properties}

import com.github.mmolimar.ksql.jdbc.utils.TestUtils._
import io.confluent.ksql.rest.client.{KsqlRestClient, MockableKsqlRestClient, RestResponse}
import io.confluent.ksql.rest.entity._
import org.eclipse.jetty.http.HttpStatus.Code
import org.scalamock.scalatest.MockFactory
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class KsqlConnectionSpec extends AnyWordSpec with Matchers with MockFactory {

  "A KsqlConnection" when {

    "validating specs" should {
      val values = KsqlConnectionValues("localhost", 8080, None, None, Map.empty[String, String])
      val mockKsqlRestClient = mock[MockableKsqlRestClient]
      val ksqlConnection = new KsqlConnection(values, new Properties) {
        override def init: KsqlRestClient = mockKsqlRestClient
      }

      "throw not supported exception if not supported" in {
        val methods = implementedMethods[KsqlConnection]
        reflectMethods[KsqlConnection](methods = methods, implemented = false, obj = ksqlConnection)
          .foreach(method => {
            assertThrows[SQLFeatureNotSupportedException] {
              method()
            }
          })
      }

      "work if implemented" in {
        assertThrows[SQLException] {
          ksqlConnection.isClosed
        }
        ksqlConnection.getTransactionIsolation should be(Connection.TRANSACTION_NONE)
        ksqlConnection.setClientInfo(new Properties)

        (mockKsqlRestClient.makeKsqlRequest(_: String)).expects(*)
          .returns(RestResponse.successful[KsqlEntityList](Code.OK, new KsqlEntityList))
        ksqlConnection.setClientInfo("", "")
        assertThrows[SQLException] {
          (mockKsqlRestClient.makeKsqlRequest(_: String)).expects(*)
            .returns(RestResponse.erroneous(Code.INTERNAL_SERVER_ERROR,
              new KsqlErrorMessage(-1, "", Collections.emptyList[String])))
          ksqlConnection.setClientInfo("", "")
        }

        ksqlConnection.isReadOnly should be(false)

        (mockKsqlRestClient.makeStatusRequest _: () => RestResponse[CommandStatuses]).expects
          .returns(RestResponse.successful[CommandStatuses]
            (Code.OK, new CommandStatuses(Collections.emptyMap[CommandId, CommandStatus.Status])))
        ksqlConnection.isValid(0) should be(true)

        Option(ksqlConnection.getMetaData) should not be None
        Option(ksqlConnection.createStatement) should not be None
        assertThrows[SQLFeatureNotSupportedException] {
          ksqlConnection.createStatement(-1, -1)
        }
        ksqlConnection.setAutoCommit(true)
        ksqlConnection.setAutoCommit(false)
        ksqlConnection.getAutoCommit should be(false)
        ksqlConnection.getSchema should be(None.orNull)
        ksqlConnection.getWarnings should be(None.orNull)
        ksqlConnection.getCatalog should be(None.orNull)
        ksqlConnection.setCatalog("test")
        ksqlConnection.getCatalog should be(None.orNull)

        (mockKsqlRestClient.close _).expects
        ksqlConnection.close()
        ksqlConnection.isClosed should be(true)
        ksqlConnection.commit()
      }
    }
  }

  "A ConnectionNotSupported" when {

    "validating specs" should {
      "throw not supported exception if not supported" in {
        val resultSet = new ConnectionNotSupported
        reflectMethods[ConnectionNotSupported](methods = Seq.empty, implemented = false, obj = resultSet)
          .foreach(method => {
            assertThrows[SQLFeatureNotSupportedException] {
              method()
            }
          })
      }
    }
  }
}
Example 25
Source File: KyuubiSQLException.scala From kyuubi with Apache License 2.0
package yaooqinn.kyuubi

import java.sql.SQLException

import scala.collection.JavaConverters._

import org.apache.hive.service.cli.thrift.{TStatus, TStatusCode}

class KyuubiSQLException(reason: String, sqlState: String, vendorCode: Int, cause: Throwable)
  extends SQLException(reason, sqlState, vendorCode, cause) {

  def this(reason: String, sqlState: String, cause: Throwable) = this(reason, sqlState, 0, cause)

  def this(reason: String, sqlState: String, vendorCode: Int) = this(reason, sqlState, vendorCode, null)

  def this(reason: String, cause: Throwable) = this(reason, null, 0, cause)

  def this(reason: String, sqlState: String) = this(reason, sqlState, vendorCode = 0)

  def this(reason: String) = this(reason, sqlState = null)

  def this(cause: Throwable) = this(cause.toString, cause)

  def toTStatus: TStatus = {
    val tStatus = new TStatus(TStatusCode.ERROR_STATUS)
    tStatus.setSqlState(getSQLState)
    tStatus.setErrorCode(getErrorCode)
    tStatus.setErrorMessage(getMessage)
    tStatus.setInfoMessages(KyuubiSQLException.toString(this).asJava)
    tStatus
  }
}

object KyuubiSQLException {

  def toTStatus(e: Exception): TStatus = e match {
    case k: KyuubiSQLException => k.toTStatus
    case _ =>
      val tStatus = new TStatus(TStatusCode.ERROR_STATUS)
      tStatus.setErrorMessage(e.getMessage)
      tStatus.setInfoMessages(toString(e).asJava)
      tStatus
  }

  def toString(cause: Throwable): List[String] = {
    toString(cause, null)
  }

  def toString(cause: Throwable, parent: Array[StackTraceElement]): List[String] = {
    val trace = cause.getStackTrace
    var m = trace.length - 1
    if (parent != null) {
      var n = parent.length - 1
      while (m >= 0 && n >= 0 && trace(m).equals(parent(n))) {
        m = m - 1
        n = n - 1
      }
    }
    enroll(cause, trace, m) ++ Option(cause.getCause).map(toString(_, trace)).getOrElse(Nil)
  }

  private[this] def enroll(ex: Throwable, trace: Array[StackTraceElement], max: Int): List[String] = {
    val builder = new StringBuilder
    builder.append('*').append(ex.getClass.getName).append(':')
    builder.append(ex.getMessage).append(':')
    builder.append(trace.length).append(':').append(max)
    List(builder.toString) ++ (0 to max).map { i =>
      builder.setLength(0)
      builder.append(trace(i).getClassName).append(":")
      builder.append(trace(i).getMethodName).append(":")
      builder.append(Option(trace(i).getFileName).getOrElse("")).append(':')
      builder.append(trace(i).getLineNumber)
      builder.toString
    }.toList
  }
}
Example 26
Source File: KyuubiSQLException.scala From kyuubi with Apache License 2.0
package org.apache.kyuubi

import java.sql.SQLException

import scala.collection.JavaConverters._

import org.apache.hive.service.rpc.thrift.{TStatus, TStatusCode}

case class KyuubiSQLException(msg: String, cause: Throwable) extends SQLException(msg, cause) {

  def toTStatus: TStatus = {
    val tStatus = new TStatus(TStatusCode.ERROR_STATUS)
    tStatus.setSqlState(getSQLState)
    tStatus.setErrorCode(getErrorCode)
    tStatus.setErrorMessage(getMessage)
    tStatus.setInfoMessages(KyuubiSQLException.toString(this).asJava)
    tStatus
  }
}

object KyuubiSQLException {

  def apply(cause: Throwable): KyuubiSQLException = {
    new KyuubiSQLException(cause.getMessage, cause)
  }

  def apply(msg: String): KyuubiSQLException = new KyuubiSQLException(msg, null)

  def toTStatus(e: Exception): TStatus = e match {
    case k: KyuubiSQLException => k.toTStatus
    case _ =>
      val tStatus = new TStatus(TStatusCode.ERROR_STATUS)
      tStatus.setErrorMessage(e.getMessage)
      tStatus.setInfoMessages(toString(e).asJava)
      tStatus
  }

  def toString(cause: Throwable): List[String] = {
    toString(cause, null)
  }

  def toString(cause: Throwable, parent: Array[StackTraceElement]): List[String] = {
    val trace = cause.getStackTrace
    var m = trace.length - 1
    if (parent != null) {
      var n = parent.length - 1
      while (m >= 0 && n >= 0 && trace(m).equals(parent(n))) {
        m = m - 1
        n = n - 1
      }
    }
    enroll(cause, trace, m) ++ Option(cause.getCause).map(toString(_, trace)).getOrElse(Nil)
  }

  private def enroll(
      ex: Throwable,
      trace: Array[StackTraceElement],
      max: Int): List[String] = {
    val builder = new StringBuilder
    builder.append('*').append(ex.getClass.getName).append(':')
    builder.append(ex.getMessage).append(':')
    builder.append(trace.length).append(':').append(max)
    List(builder.toString) ++ (0 to max).map { i =>
      builder.setLength(0)
      builder.append(trace(i).getClassName).append(":")
      builder.append(trace(i).getMethodName).append(":")
      builder.append(Option(trace(i).getFileName).getOrElse("")).append(':')
      builder.append(trace(i).getLineNumber)
      builder.toString
    }.toList
  }
}
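The companion `apply` helpers above make it convenient to raise a KyuubiSQLException from either a message or a cause and then serialize it to a Thrift TStatus. A minimal usage sketch follows; the surrounding service code (`runOrFail`) is hypothetical and only illustrates how the shown API might be called.

import org.apache.kyuubi.KyuubiSQLException

// Wrap an arbitrary failure while preserving the original cause.
def runOrFail[T](op: => T): T =
  try op
  catch {
    case e: Exception => throw KyuubiSQLException(e)
  }

// Converting to the Thrift status used by the RPC layer.
val status = KyuubiSQLException("query cancelled by user").toTStatus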
Example 27
Source File: JDBCSink.scala From BigData-News with Apache License 2.0
package com.vita.spark

import java.sql.{Connection, ResultSet, SQLException, Statement}

import org.apache.log4j.{LogManager, Logger}
import org.apache.spark.sql.{ForeachWriter, Row}

/**
 * Writes rows produced by Structured Streaming into MySQL.
 */
class JDBCSink(url: String, username: String, password: String) extends ForeachWriter[Row] {

  var statement: Statement = _
  var resultSet: ResultSet = _
  var connection: Connection = _

  override def open(partitionId: Long, version: Long): Boolean = {
    connection = new MySqlPool(url, username, password).getJdbcConn()
    statement = connection.createStatement();
    print("open")
    return true
  }

  override def process(value: Row): Unit = {
    println("process step one")
    val titleName = value.getAs[String]("titleName").replaceAll("[\\[\\]]", "")
    val count = value.getAs[Long]("count")

    val querySql = "select 1 from webCount where titleName = '" + titleName + "'"
    val insertSql = "insert into webCount(titleName,count) values('" + titleName + "' , '" + count + "')"
    val updateSql = "update webCount set count = " + count + " where titleName = '" + titleName + "'"

    println("process step two")
    try {
      // Check whether the row already exists; update it if so, otherwise insert it.
      var resultSet = statement.executeQuery(querySql)
      if (resultSet.next()) {
        println("updateSql")
        statement.executeUpdate(updateSql)
      } else {
        println("insertSql")
        statement.execute(insertSql)
      }
    } catch {
      case ex: SQLException => {
        println("SQLException")
      }
      case ex: Exception => {
        println("Exception")
      }
      case ex: RuntimeException => {
        println("RuntimeException")
      }
      case ex: Throwable => {
        println("Throwable")
      }
    }
  }

  override def close(errorOrNull: Throwable): Unit = {
    // Only close resources that were actually created (the original checked == null, which is inverted).
    if (statement != null) {
      statement.close()
    }
    if (connection != null) {
      connection.close()
    }
  }
}
Example 28
Source File: LensUtils.scala From mimir with Apache License 2.0
package mimir.lenses

import java.sql.SQLException

import mimir.algebra._
import mimir.models._
import mimir.ctables._

object LensUtils {

  def extractModelsByColumn(
    modelMap: Seq[
      (ID,                     // Model Group
        Seq[
          (ID,                 // Column
            (Model,            // Model Name
             Int,              // Model Index
             Seq[Expression]   // Hint Expressions
            )
          )
        ])
    ]
  ): (Map[ID, Seq[(ID, Int, Seq[Expression], ID)]], Seq[Model]) = {
    val modelsByColumn: Seq[(ID, (ID, Int, Seq[Expression], ID))] =
      modelMap.
        flatMap { case (modelGroup, perColumnImplementations) =>
          perColumnImplementations.map {
            case (col: ID, (model, idx, hints)) =>
              (col, (model.name, idx, hints, modelGroup))
          }
        }

    val candidateModels: Map[ID, Seq[(ID, Int, Seq[Expression], ID)]] =
      modelsByColumn
        .groupBy { _._1 }           // Group candidates by target column
        .mapValues { _.map(_._2) }  // Convert to Column -> List[(Model, Idx, Category)]

    val allModels: Seq[Model] =
      modelMap                      // Start with the models
        .flatMap { _._2.map { _._2._1 } }

    val modelEntities: Seq[Model] =
      allModels                     // We only care about the models themselves
        .groupBy( _.name )          // Organize by model name
        .toSeq
        .map { (_:(ID, Seq[Model]))._2.head }  // We only need one copy of each model

    (candidateModels, modelEntities)
  }

  def buildMetaModel(
    metaModel: ID,
    metaModelIdx: Int,
    metaModelArgs: Seq[Expression],
    metaModelHints: Seq[Expression],
    inputModels: Seq[(ID, Int, Seq[Expression], ID)],
    inputArgs: Seq[Expression]
  ): Expression = {
    val inputVGTerms =
      inputModels.
        map({ case (model, idx, hints, cat) =>
          (
            StringPrimitive(cat.id),
            VGTerm(model, idx, inputArgs, hints)
          )
        })

    inputVGTerms match {
      case Nil =>
        throw new SQLException("No valid models to be wrapped")
      case List((_, term)) => term
      case _ =>
        ExpressionUtils.makeCaseExpression(
          VGTerm(metaModel, metaModelIdx, metaModelArgs, metaModelHints),
          inputVGTerms,
          NullPrimitive()
        )
    }
  }
}
Example 29
Source File: MimirFunction.scala From mimir with Apache License 2.0
package mimir.exec.sqlite;

import java.sql.SQLException

import mimir.algebra._
import mimir.algebra.Type._
import mimir.util._

abstract class MimirFunction extends org.sqlite.Function {

  def value_mimir(idx: Int): PrimitiveValue = value_mimir(idx, TAny())

  def value_mimir(idx: Int, t: Type): PrimitiveValue = {
    if (value_type(idx) == SQLiteCompat.NULL) { NullPrimitive() }
    else {
      t match {
        case TInt() => IntPrimitive(value_int(idx))
        case TFloat() => FloatPrimitive(value_double(idx))
        case TAny() =>
          value_type(idx) match {
            case SQLiteCompat.INTEGER => IntPrimitive(value_int(idx))
            case SQLiteCompat.FLOAT => FloatPrimitive(value_double(idx))
            case SQLiteCompat.TEXT
               | SQLiteCompat.BLOB => StringPrimitive(value_text(idx))
          }
        case _ => TextUtils.parsePrimitive(t, value_text(idx))
      }
    }
  }

  def return_mimir(p: PrimitiveValue): Unit = {
    p match {
      case IntPrimitive(i) => result(i)
      case FloatPrimitive(f) => result(f)
      case StringPrimitive(s) => result(s)
      case d: DatePrimitive => result(d.asString)
      case BoolPrimitive(true) => result(1)
      case BoolPrimitive(false) => result(0)
      case RowIdPrimitive(r) => result(r)
      case t: TimestampPrimitive => result(t.asString)
      case i: IntervalPrimitive => result(i.asString)
      case TypePrimitive(t) => result(Type.toString(t))
      case NullPrimitive() => result()
    }
  }
}

abstract class SimpleMimirFunction(argTypes: List[Type]) extends MimirFunction {

  def apply(args: List[PrimitiveValue]): PrimitiveValue

  override def xFunc(): Unit = {
    return_mimir(
      apply(
        argTypes.zipWithIndex.map( { case (t, i) => value_mimir(i, t) } )
      )
    )
  }
}
Example 30
Source File: LazyRow.scala From mimir with Apache License 2.0
package mimir.exec.result

import java.sql.SQLException

import mimir.algebra._

case class LazyRow(
  input: Row,
  tupleDefinition: Seq[Row => PrimitiveValue],
  annotationDefinition: Seq[Row => PrimitiveValue],
  val tupleSchema: Seq[(ID, Type)],
  val annotationIndexes: Map[ID, Int]
) extends Row {

  def tuple: Seq[PrimitiveValue] = tupleDefinition.map { _(input) }

  def apply(idx: Int): PrimitiveValue = {
    try {
      tupleDefinition(idx)(input)
    } catch {
      case _: IndexOutOfBoundsException =>
        throw new SQLException(s"$tuple does not have a column $idx")
      case e: Throwable =>
        throw new RuntimeException(
          s"Error Decoding ${tupleSchema(idx)._1} (${tupleSchema(idx)._2})", e
        )
    }
  }

  def annotation(name: ID): PrimitiveValue = annotation(annotationIndexes(name))

  def annotation(idx: Int): PrimitiveValue = annotationDefinition(idx)(input)
}
Example 31
package mimir.exec.result

import java.sql.SQLException

import mimir.algebra._
import mimir.provenance._
import mimir.ctables._

trait Row {

  def tuple: Seq[PrimitiveValue]
  def tupleMap: Map[ID, PrimitiveValue] =
    tupleSchema.zip(tuple).map { x => (x._1._1 -> x._2) }.toMap

  def apply(idx: Int): PrimitiveValue
  def apply(name: ID): PrimitiveValue = {
    val idx = tupleSchema.indexWhere( _._1.equals(name) )
    if (idx < 0) {
      throw new SQLException(s"Field '$name' not in tuple: ${tupleSchema.map(_._1)}")
    } else {
      return apply(idx)
    }
  }

  def annotation(idx: Int): PrimitiveValue
  def annotation(name: ID): PrimitiveValue

  def tupleSchema: Seq[(ID, Type)]

  def provenance: RowIdPrimitive =
    annotation(Provenance.rowidColnameBase) match {
      case NullPrimitive() => RowIdPrimitive("")
      case x => RowIdPrimitive(x.asString)
    }

  def isDeterministic(): Boolean =
    annotation(OperatorDeterminism.mimirRowDeterministicColumnName) match {
      case NullPrimitive() => false
      case BoolPrimitive(t) => t
      case IntPrimitive(i) => i match {
        case 1 => true
        case 0 => false
        case -1 => false
        case _ => throw new RAException("Error getting determinism")
      }
      case _ => throw new RAException("Error getting determinism")
    }

  def isColDeterministic(col: ID): Boolean =
    annotation(OperatorDeterminism.mimirColDeterministicColumn(col)) match {
      case NullPrimitive() => false
      case BoolPrimitive(t) => t
      case IntPrimitive(i) => i match {
        case 1 => true
        case 0 => false
        case -1 => false
        case _ => throw new RAException("Error getting determinism")
      }
      case _ => throw new RAException("Error getting determinism")
    }

  def isColDeterministic(idx: Int): Boolean =
    isColDeterministic(tupleSchema(idx)._1)

  override def toString: String =
    "<" + tupleSchema.zip(tuple).map { case ((name, _), v) => name + ":" + v }.mkString(", ") + ">"
}
Example 32
Source File: LocalFSRawFileProvider.scala From mimir with Apache License 2.0
package mimir.data.staging

import java.net.URL
import java.io.{ File, InputStream, OutputStream, FileOutputStream }
import java.sql.SQLException

import scala.util.Random
import com.typesafe.scalalogging.LazyLogging
import org.apache.spark.sql.DataFrame

import mimir.algebra.ID

  private def transferBytes(input: InputStream, output: OutputStream): Unit = {
    val buffer = Array.ofDim[Byte](1024 * 1024) // 1MB buffer
    var bytesRead = input.read(buffer)
    while (bytesRead >= 0) {
      output.write(buffer, 0, bytesRead)
      bytesRead = input.read(buffer)
    }
  }

  def stage(input: InputStream, fileExtension: String, nameHint: Option[String]): String = {
    val file = makeName(fileExtension, nameHint)
    transferBytes(input, new FileOutputStream(file))
    return file.toString
  }

  def stage(url: URL, nameHint: Option[String]): String = {
    val pathComponents = url.getPath.split("/")
    val nameComponents = pathComponents.reverse.head.split(".")
    val extension =
      if (nameComponents.size > 1) { nameComponents.reverse.head }
      else { "data" } // default to generic 'data' if there's no extension
    stage(url.openStream(), extension, nameHint)
  }

  def stage(input: DataFrame, format: ID, nameHint: Option[String]): String = {
    val targetFile = makeName(format.id, nameHint).toString
    input.write
      .format(format.id)
      .save(targetFile)
    return targetFile
  }

  def drop(local: String): Unit = {
    new File(local).delete()
  }
}
Example 33
Source File: HDFSRawFileProvider.scala From mimir with Apache License 2.0
package mimir.data.staging

import java.net.URL
import java.io.{ File, InputStream, OutputStream, FileOutputStream }
import java.sql.SQLException

import scala.util.Random
import com.typesafe.scalalogging.LazyLogging
import org.apache.spark.sql.DataFrame

import mimir.algebra.ID
import mimir.util.HadoopUtils
import mimir.exec.spark.MimirSpark

  private def makeName(extension: String, nameHint: Option[String]): File = {
    val rand = new Random().alphanumeric
    // Try 1000 times to create a randomly named file
    for (i <- 0 until 1000) {
      val candidate = new File(basePath,
        nameHint match {
          case Some(hint) => s"${hint.replaceAll("[^a-zA-Z0-9]", "")}-${rand.take(10).mkString}.${extension}"
          case None => s"${rand.take(20).mkString}.${extension}"
        }
      )
      // If the randomly named file doesn't exist, we're done.
      if (!candidate.exists()) { return candidate }
    }
    // Fail after 1000 attempts.
    throw new SQLException(s"Can't allocate name for $nameHint")
  }

  def stage(input: InputStream, fileExtension: String, nameHint: Option[String]): String = {
    val file = makeName(fileExtension, nameHint)
    logger.debug("Stage File To HDFS: " + hdfsHome + File.separator + file.toString)
    //if(!HadoopUtils.fileExistsHDFS(sparkSql.sparkSession.sparkContext, fileName))
    HadoopUtils.writeToHDFS(MimirSpark.get.sparkSession.sparkContext, file.getName, input, true)
    logger.debug("... done\n")
    return s"$hdfsHome/${file.getName}"
  }

  def stage(url: URL, nameHint: Option[String]): String = {
    val pathComponents = url.getPath.split("/")
    val nameComponents = pathComponents.reverse.head.split(".")
    val extension =
      if (nameComponents.size > 1) { nameComponents.reverse.head }
      else { "data" } // default to generic 'data' if there's no extension
    stage(url.openStream(), extension, nameHint)
  }

  def stage(input: DataFrame, format: ID, nameHint: Option[String]): String = {
    val targetFile = makeName(format.id, nameHint).toString
    input.write
      .format(format.id)
      .save(targetFile)
    return targetFile
  }

  def drop(local: String): Unit = {
    new File(local).delete()
  }
}
Example 34
Source File: SampleFunctions.scala From mimir with Apache License 2.0
package mimir.algebra.function;

import mimir.exec.mode.{TupleBundle, WorldBits}
import mimir.algebra._
import java.sql.SQLException

object SampleFunctions {

  def register(fr: FunctionRegistry) {

    fr.register(ID("best_sample"),
      (args: Seq[PrimitiveValue]) => {
        TupleBundle.mostLikelyValue(
          args.head.asLong,
          args.tail.grouped(2).
            map { arg => (arg(1), arg(0).asDouble) }.toSeq
        ) match {
          case Some(v) => v
          case None => NullPrimitive()
        }
      },
      (types: Seq[Type]) => {
        val debugExpr = Function("best_sample", types.map(TypePrimitive(_)):_*)
        Typechecker.assertNumeric(types.head, debugExpr)
        Typechecker.assertLeastUpperBound(
          types.tail.grouped(2).
            map { t =>
              Typechecker.assertNumeric(t(0), debugExpr)
              t(1)
            },
          "BEST_SAMPLE",
          debugExpr
        )
      }
    )

    fr.register(ID("sample_confidence"),
      (args: Seq[PrimitiveValue]) =>
        FloatPrimitive(
          WorldBits.confidence(args(0).asLong, args(0).asLong.toInt)
        ),
      (types: Seq[Type]) => {
        Typechecker.assertNumeric(types(0), Function("sample_confidence", types.map(TypePrimitive(_)):_*))
        Typechecker.assertNumeric(types(1), Function("sample_confidence", types.map(TypePrimitive(_)):_*))
        TFloat()
      }
    )

    fr.register(ID("possion"),
      {
        case Seq(IntPrimitive(m)) => {
          IntPrimitive(mimir.exec.sqlite.Possion.poisson_helper(m))
        }
        case Seq(FloatPrimitive(m)) => {
          IntPrimitive(mimir.exec.sqlite.Possion.poisson_helper(m))
        }
        case Seq(NullPrimitive()) => NullPrimitive()
        case x => throw new SQLException("Non-numeric parameter to possion: '" + x + "'")
      },
      ((args: Seq[Type]) => TInt())
    )

    fr.register(ID("gamma"),
      {
        case Seq(FloatPrimitive(k), FloatPrimitive(theta)) => {
          FloatPrimitive(mimir.exec.sqlite.Gamma.sampleGamma(k, theta))
        }
        case Seq(NullPrimitive(), FloatPrimitive(r)) => NullPrimitive()
        case Seq(FloatPrimitive(r), NullPrimitive()) => NullPrimitive()
        case Seq(NullPrimitive(), NullPrimitive()) => NullPrimitive()
        case x => throw new SQLException("Non-numeric parameter to gamma: '" + x + "'")
      },
      ((args: Seq[Type]) => TFloat())
    )
  }
}