scala.util.parsing.combinator.RegexParsers Scala Examples

The following examples show how to use scala.util.parsing.combinator.RegexParsers. You can go to the original project or source file by following the links above each example.
Example 1
Source File: CompositionParser.scala    From sfseize   with Apache License 2.0
package org.eichelberger.sfc.utils

import com.typesafe.scalalogging.Logging
import org.eichelberger.sfc.SpaceFillingCurve.{Composable, OrdinalVector, OrdinalNumber, SpaceFillingCurve}
import org.eichelberger.sfc._
import scala.util.parsing.combinator.RegexParsers


object CompositionParser extends RegexParsers {
  val LPAREN = "("
  val RPAREN = ")"
  val COMMA = ","

  val R_CURVE_NAME = """(?i)r""".r
  val Z_CURVE_NAME = """(?i)z""".r
  val H_CURVE_NAME = """(?i)h""".r

  def curveName: Parser[String] = (R_CURVE_NAME | Z_CURVE_NAME | H_CURVE_NAME) ^^ { _.toString }

  def precision: Parser[Int] = """\d+""".r ^^ { _.toInt }

  case class PrecisionOrCurve(precisionOpt: Option[Int], curveOpt: Option[SpaceFillingCurve])

  def childArg: Parser[PrecisionOrCurve] = (precision | curveParser) ^^ {
    case p: Int               => PrecisionOrCurve(Some(p), None)
    case c: SpaceFillingCurve => PrecisionOrCurve(None, Some(c))
  }

  def curveParser: Parser[ComposedCurve] = curveName ~ LPAREN ~ repsep(childArg, COMMA) ~ RPAREN ^^ {
    case name ~ lp ~ children ~ rp =>
      val precisions = OrdinalVector(children.flatMap {
        case PrecisionOrCurve(Some(p), None) => Seq(p.toLong)
        case PrecisionOrCurve(None, Some(c)) => c.precisions.toSeq
      }:_*)
      val curve = name match {
        case s: String if s.matches(R_CURVE_NAME.toString()) => new RowMajorCurve(precisions)
        case s: String if s.matches(Z_CURVE_NAME.toString()) => new ZCurve(precisions)
        case s: String if s.matches(H_CURVE_NAME.toString()) => new CompactHilbertCurve(precisions)
      }
      val childParams: Seq[Composable] = children.map {
        case PrecisionOrCurve(Some(p), None) => DefaultDimensions.createIdentityDimension(p)
        case PrecisionOrCurve(None, Some(c)) => c
      }
      new ComposedCurve(curve, childParams)
    }

  def buildWholeNumberCurve(s: String): ComposedCurve = parse(curveParser, s).get
}

case class CompositionParserException(msg: String) extends Exception(msg) 
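
The entry point is buildWholeNumberCurve, which parses a composition expression into a ComposedCurve. A hypothetical call, assuming the curve letters and precisions sketched by the grammar above (the concrete expression is made up for illustration):

// "R" wraps a row-major curve around a 2-bit dimension and a nested Z-curve
val curve: ComposedCurve = CompositionParser.buildWholeNumberCurve("R(2, Z(3, 4))")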
Example 2
Source File: PfsSolutionParser.scala    From hyperspark   with Apache License 2.0
package pfsp.util

import scala.util.parsing.combinator.RegexParsers
import pfsp.solution.PfsSolution
import pfsp.solution.PfsEvaluatedSolution

object PfsSolutionParser extends RegexParsers {

  def number: Parser[Int] = """\d+""".r ^^ { _.toInt }
  def identifier = """[_\p{L}][_\p{L}\p{Nd}]*""".r
  def row: Parser[Array[Int]] = number.+ ^^ { _.toArray }
  def solution: Parser[PfsSolution] = identifier ~> number ~ row ^^ {
    case ms ~ r => new PfsSolution(r)
  }
  def apply(input: String): Option[PfsSolution] = parseAll(solution, input) match {
    case Success(result, _) => Some(result)
    case NoSuccess(_, _)    => None
  }

}

object PfsEvaluatedSolutionParser extends RegexParsers {

  def number: Parser[Int] = """\d+""".r ^^ { _.toInt }
  def identifier = """[_\p{L}][_\p{L}\p{Nd}]*""".r
  def row: Parser[Array[Int]] = number.+ ^^ { _.toArray }
  def solution: Parser[PfsEvaluatedSolution] = identifier ~> number ~ row ^^ {
    case ms ~ r => new PfsEvaluatedSolution(ms, r)
  }
  def apply(input: String): Option[PfsEvaluatedSolution] = parseAll(solution, input) match {
    case Success(result, _) => Some(result)
    case NoSuccess(_, _)    => None
  }

} 
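
Based on the grammar, the expected input is an identifier, a makespan, and the job permutation, all whitespace-separated (the concrete format is an assumption). A hypothetical call:

val parsed: Option[PfsSolution] = PfsSolutionParser("solution 1234 3 1 2 5 4")
// identifier "solution", makespan 1234 (dropped for PfsSolution), permutation Array(3, 1, 2, 5, 4)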
Example 3
Source File: PfsProblemParser.scala    From hyperspark   with Apache License 2.0
package pfsp.util

import scala.util.parsing.combinator.RegexParsers
import pfsp.problem.PfsProblem

object PfsProblemParser extends RegexParsers {

  def number: Parser[Int] = """\d+""".r ^^ { _.toInt }
  def params: Parser[(Int, Int)] = number ~ number <~ "x" ^^ { case x ~ y => (x, y) }
  def row: Parser[Array[Int]] = number.+ <~ "x" ^^ { _.toArray }
  def matrix: Parser[Array[Array[Int]]] = row.+ ^^ { _.toArray }
  def problem: Parser[PfsProblem] = params ~ matrix ^^ {
    case p ~ m => new PfsProblem(p._1, p._2, m)
  }
  def apply(input: String): Option[PfsProblem] = parseAll(problem, input) match {
    case Success(result, _) => Some(result)
    case NoSuccess(_, _)    => None
  }
}

object DelphiProblemParser extends RegexParsers {
  def number: Parser[Int] = """\d+""".r ^^ { _.toInt }
  def params: Parser[(Int,Int)] = number ~ number <~ "x" ^^ {case x ~ y => (x,y)}
  def entry: Parser[Int] = number ~ number ^^ {case x ~ y => y}
  def row: Parser[Array[Int]] = entry.+ <~ "x" ^^ {_.toArray}
  def matrix: Parser[Array[Array[Int]]] = row.+ ^^ {_.toArray}
  def getColumn(ind: Int, m: Array[Array[Int]]): Array[Int] = m.map{_(ind)}
  def transpose(m: Array[Array[Int]]): Array[Array[Int]] = {
    val numRows = m(0).length
    (0 until numRows).map(getColumn(_, m)).toArray
  }
  def problem: Parser[PfsProblem] = params ~ matrix ^^ {
    case p ~ m => new PfsProblem(p._1,p._2,transpose(m))
  }
  def apply(input: String): Option[PfsProblem] = parseAll(problem, input) match {
      case Success(result, _) => Some(result)
      case NoSuccess(_, _) => None
  }
} 
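
Per the grammar, a problem consists of a two-number header and a processing-time matrix, with a literal "x" terminating the header and each row. A hypothetical 2-by-3 instance:

val problem: Option[PfsProblem] = PfsProblemParser("2 3 x 5 6 7 x 8 9 10 x")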
Example 4
Source File: BaseParser.scala    From berilia   with Apache License 2.0
package com.criteo.dev.cluster.utils.ddl

import scala.util.parsing.combinator.RegexParsers

trait BaseParser extends RegexParsers {
  def validName: Parser[String] = "`".? ~> "[A-Za-z0-9_]+".r  <~ "`".?

  def hiveStringLiteral: Parser[String] = ("'" | "\"") ~> "[^'\"]*".r <~ ("'" | "\"")

  def properties(delimiter: String = "="): Parser[Map[String, String]] = "(" ~> repsep(hiveStringLiteral ~ delimiter ~ hiveStringLiteral, ",") <~ ")" ^^ {
    _.map { case k ~ _ ~ v =>
      (k, v)
    }.toMap
  }

  def comment: Parser[String] = "comment" ~> hiveStringLiteral

  def int: Parser[Int] = "\\d+".r ^^ (_.toInt)

  // parser for case insensitive string literal
  implicit def caseInsensitiveLiteral(s: String): Parser[String] = new Parser[String] {
    def apply(in: Input) = {
      val source = in.source
      val offset = in.offset
      val start = handleWhiteSpace(source, offset)
      var i = 0
      var j = start
      while (i < s.length && j < source.length && s.charAt(i).toLower == source.charAt(j).toLower) {
        i += 1
        j += 1
      }
      if (i == s.length)
        Success(source.subSequence(start, j).toString, in.drop(j - offset))
      else {
        val found = if (start == source.length()) "end of source" else "`" + source.charAt(start) + "'"
        Failure("`" + s + "' expected but " + found + " found", in.drop(start - offset))
      }
    }
  }
} 
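
Because every bare string literal in a subtype goes through the implicit caseInsensitiveLiteral, keywords match in any case. A minimal sketch with a hypothetical object:

object DdlDemo extends BaseParser
DdlDemo.parseAll(DdlDemo.comment, "COMMENT 'stores raw events'")
// => Success("stores raw events", ...)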
Example 5
Source File: SparkSQLParser.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.sql.execution

import scala.util.parsing.combinator.RegexParsers

import org.apache.spark.sql.catalyst.AbstractSparkSQLParser
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
import org.apache.spark.sql.catalyst.plans.logical
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.types.StringType


class SparkSQLParser(fallback: String => LogicalPlan) extends AbstractSparkSQLParser {

  // A parser for the key-value part of the "SET [key = [value ]]" syntax
  private object SetCommandParser extends RegexParsers {
    private val key: Parser[String] = "(?m)[^=]+".r

    private val value: Parser[String] = "(?m).*$".r

    private val output: Seq[Attribute] = Seq(AttributeReference("", StringType, nullable = false)())

    private val pair: Parser[LogicalPlan] =
      (key ~ ("=".r ~> value).?).? ^^ {
        case None => SetCommand(None)
        case Some(k ~ v) => SetCommand(Some(k.trim -> v.map(_.trim)))
      }

    def apply(input: String): LogicalPlan = parseAll(pair, input) match {
      case Success(plan, _) => plan
      case x => sys.error(x.toString)
    }
  }

  protected val AS = Keyword("AS")
  protected val CACHE = Keyword("CACHE")
  protected val CLEAR = Keyword("CLEAR")
  protected val DESCRIBE = Keyword("DESCRIBE")
  protected val EXTENDED = Keyword("EXTENDED")
  protected val FUNCTION = Keyword("FUNCTION")
  protected val FUNCTIONS = Keyword("FUNCTIONS")
  protected val IN = Keyword("IN")
  protected val LAZY = Keyword("LAZY")
  protected val SET = Keyword("SET")
  protected val SHOW = Keyword("SHOW")
  protected val TABLE = Keyword("TABLE")
  protected val TABLES = Keyword("TABLES")
  protected val UNCACHE = Keyword("UNCACHE")

  override protected lazy val start: Parser[LogicalPlan] =
    cache | uncache | set | show | desc | others

  private lazy val cache: Parser[LogicalPlan] =
    CACHE ~> LAZY.? ~ (TABLE ~> ident) ~ (AS ~> restInput).? ^^ {
      case isLazy ~ tableName ~ plan =>
        CacheTableCommand(tableName, plan.map(fallback), isLazy.isDefined)
    }

  private lazy val uncache: Parser[LogicalPlan] =
    ( UNCACHE ~ TABLE ~> ident ^^ {
        case tableName => UncacheTableCommand(tableName)
      }
    | CLEAR ~ CACHE ^^^ ClearCacheCommand
    )

  private lazy val set: Parser[LogicalPlan] =
    SET ~> restInput ^^ {
      case input => SetCommandParser(input)
    }

  // It can be the following patterns:
  // SHOW FUNCTIONS;
  // SHOW FUNCTIONS mydb.func1;
  // SHOW FUNCTIONS func1;
  // SHOW FUNCTIONS `mydb.a`.`func1.aa`;
  private lazy val show: Parser[LogicalPlan] =
    ( SHOW ~> TABLES ~ (IN ~> ident).? ^^ {
        case _ ~ dbName => ShowTablesCommand(dbName)
      }
    | SHOW ~ FUNCTIONS ~> ((ident <~ ".").? ~ (ident | stringLit)).? ^^ {
        case Some(f) => logical.ShowFunctions(f._1, Some(f._2))
        case None => logical.ShowFunctions(None, None)
      }
    )

  private lazy val desc: Parser[LogicalPlan] =
    DESCRIBE ~ FUNCTION ~> EXTENDED.? ~ (ident | stringLit) ^^ {
      case isExtended ~ functionName => logical.DescribeFunction(functionName, isExtended.isDefined)
    }

  private lazy val others: Parser[LogicalPlan] =
    wholeInput ^^ {
      case input => fallback(input)
    }

} 
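
Only SetCommandParser uses RegexParsers here; the rest builds on Spark's AbstractSparkSQLParser. A standalone sketch of the same key/value split, with SetArgsParser as a hypothetical stand-in for the Spark-specific plumbing:

import scala.util.parsing.combinator.RegexParsers

object SetArgsParser extends RegexParsers {
  private val key: Parser[String] = "(?m)[^=]+".r
  private val value: Parser[String] = "(?m).*$".r
  // Optional key, optionally followed by "=value", mirroring SetCommandParser above
  def pair: Parser[Option[(String, Option[String])]] =
    (key ~ ("=".r ~> value).?).? ^^ { _.map { case k ~ v => (k.trim, v.map(_.trim)) } }
}

// SetArgsParser.parseAll(SetArgsParser.pair, "spark.sql.shuffle.partitions=10")
// => Success(Some(("spark.sql.shuffle.partitions", Some("10"))), ...)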
Example 6
Source File: CommandStatementParser.scala    From NSDb   with Apache License 2.0
package io.radicalbit.nsdb.sql.parser

import io.radicalbit.nsdb.common.statement._
import io.radicalbit.nsdb.sql.parser.StatementParserResult._

import scala.util.parsing.combinator.RegexParsers


class CommandStatementParser(db: String) extends RegexParsers {

  implicit class InsensitiveString(str: String) {
    def ignoreCase: Parser[String] = ("""(?i)\Q""" + str + """\E""").r ^^ { _.toUpperCase }
  }

  private val Describe   = "DESCRIBE" ignoreCase
  private val Metrics    = "METRICS" ignoreCase
  private val Namespaces = "NAMESPACES" ignoreCase
  private val Show       = "SHOW" ignoreCase
  private val Use        = "USE" ignoreCase

  private val namespace = """(^[a-zA-Z][a-zA-Z0-9_]*)""".r
  private val metric    = """(^[a-zA-Z][a-zA-Z0-9_]*)""".r

  private def showNamespaces = Show ~ Namespaces ^^ { _ =>
    ShowNamespaces
  }

  private def useNamespace = Use ~> namespace ^^ { ns =>
    UseNamespace(ns)
  }

  private def showMetrics(namespace: Option[String]) =
    Show ~ Metrics ^? ({
      case _ if namespace.isDefined => ShowMetrics(db, namespace.get)
    }, _ => "Please select a valid namespace to list the associated metrics.")

  private def describeMetric(namespace: Option[String]): Parser[DescribeMetric] =
    Describe ~> metric ^? ({
      case m if namespace.isDefined => DescribeMetric(db, namespace = namespace.get, metric = m)
    }, _ => "Please select a valid namespace to describe the given metric.")

  private def commands(namespace: Option[String]) =
    showNamespaces | useNamespace | showMetrics(namespace) | describeMetric(namespace)

  def parse(namespace: Option[String], input: String): CommandStatementParserResult =
    parse(commands(namespace), s"$input;") match {
      case Success(res, _) => CommandStatementParserSuccess(input, res)
      case Error(msg, _)   => CommandStatementParserFailure(input, msg)
      case Failure(msg, _) => CommandStatementParserFailure(input, msg)
    }
} 
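
A hypothetical session; a namespace must be selected before metrics can be listed:

val parser = new CommandStatementParser("mydb")
parser.parse(None, "use registry")
// => CommandStatementParserSuccess("use registry", UseNamespace("registry"))
parser.parse(Some("registry"), "show metrics")
// => CommandStatementParserSuccess("show metrics", ShowMetrics("mydb", "registry"))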
Example 7
Source File: Console.scala    From reactive-application-development-scala   with Apache License 2.0
package com.rarebooks.library

import scala.util.parsing.combinator.RegexParsers

trait Console {

  protected sealed trait Command

  protected object Command {
    case class Customer(count: Int, odds: Int, tolerance: Int) extends Command
    case object Quit extends Command
    case class Unknown(command: String) extends Command
    def apply(command: String): Command = CommandParser.parseCommand(command)
  }

  private object CommandParser extends RegexParsers {

    def parseCommand(s: String): Command =
      parseAll(parser, s) match {
        case Success(command, _) => command
        case _                   => Command.Unknown(s)
      }

    def createCustomer: Parser[Command.Customer] =
      opt(int) ~ ("customer|c".r ~> opt(int) ~ opt(int)) ^^ {
        case count ~ (odds ~ tolerance) =>
          Command.Customer(
            count getOrElse 1,
            odds getOrElse 75,
            tolerance getOrElse 3)
      }

    def quit: Parser[Command.Quit.type] =
      "quit|q".r ^^ (_ => Command.Quit)

    def int: Parser[Int] =
      """\d+""".r ^^ (_.toInt)
  }

  private val parser: CommandParser.Parser[Command] =
    CommandParser.createCustomer | CommandParser.quit
} 
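
Hypothetical inputs and the commands they map to (Command is only visible inside classes mixing in Console):

// Command("2 c 50 1") => Command.Customer(2, 50, 1)
// Command("c")        => Command.Customer(1, 75, 3)   (the defaults)
// Command("q")        => Command.Quit
// Command("help")     => Command.Unknown("help")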
Example 8
Source File: SparkSQLParser.scala    From spark1.52   with Apache License 2.0
package org.apache.spark.sql

import scala.util.parsing.combinator.RegexParsers

import org.apache.spark.sql.catalyst.AbstractSparkSQLParser
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
import org.apache.spark.sql.catalyst.plans.logical.{DescribeFunction, LogicalPlan, ShowFunctions}
import org.apache.spark.sql.execution._
import org.apache.spark.sql.types.StringType



private[sql] class SparkSQLParser(fallback: String => LogicalPlan) extends AbstractSparkSQLParser {

  // A parser for the key-value part of the "SET [key = [value ]]" syntax
  private object SetCommandParser extends RegexParsers {
    private val key: Parser[String] = "(?m)[^=]+".r

    private val value: Parser[String] = "(?m).*$".r

    private val output: Seq[Attribute] = Seq(AttributeReference("", StringType, nullable = false)())

    private val pair: Parser[LogicalPlan] =
      (key ~ ("=".r ~> value).?).? ^^ {
        case None => SetCommand(None)
        case Some(k ~ v) => SetCommand(Some(k.trim -> v.map(_.trim)))
      }

    def apply(input: String): LogicalPlan = parseAll(pair, input) match {
      case Success(plan, _) => plan
      case x => sys.error(x.toString)
    }
  }

  protected val AS = Keyword("AS")
  protected val CACHE = Keyword("CACHE")
  protected val CLEAR = Keyword("CLEAR")
  protected val DESCRIBE = Keyword("DESCRIBE")
  protected val EXTENDED = Keyword("EXTENDED")
  protected val FUNCTION = Keyword("FUNCTION")
  protected val FUNCTIONS = Keyword("FUNCTIONS")
  protected val IN = Keyword("IN")
  protected val LAZY = Keyword("LAZY")
  protected val SET = Keyword("SET")
  protected val SHOW = Keyword("SHOW")
  protected val TABLE = Keyword("TABLE")
  protected val TABLES = Keyword("TABLES")
  protected val UNCACHE = Keyword("UNCACHE")

  override protected lazy val start: Parser[LogicalPlan] =
    cache | uncache | set | show | desc | others

  private lazy val cache: Parser[LogicalPlan] =
    CACHE ~> LAZY.? ~ (TABLE ~> ident) ~ (AS ~> restInput).? ^^ {
      case isLazy ~ tableName ~ plan =>
        CacheTableCommand(tableName, plan.map(fallback), isLazy.isDefined)
    }

  private lazy val uncache: Parser[LogicalPlan] =
    ( UNCACHE ~ TABLE ~> ident ^^ {
        case tableName => UncacheTableCommand(tableName)
      }
    | CLEAR ~ CACHE ^^^ ClearCacheCommand
    )

  private lazy val set: Parser[LogicalPlan] =
    SET ~> restInput ^^ {
      case input => SetCommandParser(input)
    }

  // It can be the following patterns:
  // SHOW FUNCTIONS;
  // SHOW FUNCTIONS mydb.func1;
  // SHOW FUNCTIONS func1;
  // SHOW FUNCTIONS `mydb.a`.`func1.aa`;
  private lazy val show: Parser[LogicalPlan] =
    ( SHOW ~> TABLES ~ (IN ~> ident).? ^^ {
        case _ ~ dbName => ShowTablesCommand(dbName)
      }
    | SHOW ~ FUNCTIONS ~> ((ident <~ ".").? ~ (ident | stringLit)).? ^^ {
        case Some(f) => ShowFunctions(f._1, Some(f._2))
        case None => ShowFunctions(None, None)
      }
    )

  private lazy val desc: Parser[LogicalPlan] =
    DESCRIBE ~ FUNCTION ~> EXTENDED.? ~ (ident | stringLit) ^^ {
      case isExtended ~ functionName => DescribeFunction(functionName, isExtended.isDefined)
    }

  private lazy val others: Parser[LogicalPlan] =
    wholeInput ^^ {
      case input => fallback(input)
    }

} 
Example 9
Source File: SparkSQLParser.scala    From iolap   with Apache License 2.0
package org.apache.spark.sql

import scala.util.parsing.combinator.RegexParsers

import org.apache.spark.sql.catalyst.AbstractSparkSQLParser
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution._
import org.apache.spark.sql.types.StringType



private[sql] class SparkSQLParser(fallback: String => LogicalPlan) extends AbstractSparkSQLParser {

  // A parser for the key-value part of the "SET [key = [value ]]" syntax
  private object SetCommandParser extends RegexParsers {
    private val key: Parser[String] = "(?m)[^=]+".r

    private val value: Parser[String] = "(?m).*$".r

    private val output: Seq[Attribute] = Seq(AttributeReference("", StringType, nullable = false)())

    private val pair: Parser[LogicalPlan] =
      (key ~ ("=".r ~> value).?).? ^^ {
        case None => SetCommand(None, output)
        case Some(k ~ v) => SetCommand(Some(k.trim -> v.map(_.trim)), output)
      }

    def apply(input: String): LogicalPlan = parseAll(pair, input) match {
      case Success(plan, _) => plan
      case x => sys.error(x.toString)
    }
  }

  protected val AS = Keyword("AS")
  protected val CACHE = Keyword("CACHE")
  protected val CLEAR = Keyword("CLEAR")
  protected val IN = Keyword("IN")
  protected val LAZY = Keyword("LAZY")
  protected val SET = Keyword("SET")
  protected val SHOW = Keyword("SHOW")
  protected val TABLE = Keyword("TABLE")
  protected val TABLES = Keyword("TABLES")
  protected val UNCACHE = Keyword("UNCACHE")

  override protected lazy val start: Parser[LogicalPlan] = cache | uncache | set | show | others

  private lazy val cache: Parser[LogicalPlan] =
    CACHE ~> LAZY.? ~ (TABLE ~> ident) ~ (AS ~> restInput).? ^^ {
      case isLazy ~ tableName ~ plan =>
        CacheTableCommand(tableName, plan.map(fallback), isLazy.isDefined)
    }

  private lazy val uncache: Parser[LogicalPlan] =
    ( UNCACHE ~ TABLE ~> ident ^^ {
        case tableName => UncacheTableCommand(tableName)
      }
    | CLEAR ~ CACHE ^^^ ClearCacheCommand
    )

  private lazy val set: Parser[LogicalPlan] =
    SET ~> restInput ^^ {
      case input => SetCommandParser(input)
    }

  private lazy val show: Parser[LogicalPlan] =
    SHOW ~> TABLES ~ (IN ~> ident).? ^^ {
      case _ ~ dbName => ShowTablesCommand(dbName)
    }

  private lazy val others: Parser[LogicalPlan] =
    wholeInput ^^ {
      case input => fallback(input)
    }

} 
Example 10
Source File: DigestHeader.scala    From CM-Well   with Apache License 2.0
package security.httpauth

import scala.util.parsing.combinator.RegexParsers
import scala.util.{Success, Try}


trait DigestHeader {
  val realm: String
  val nonce: String
  val opaque: String
}

case class DigestServerHeader(realm: String, nonce: String, opaque: String) extends DigestHeader {
  override def toString = {
    Seq("realm" -> realm, "nonce" -> nonce, "opaque" -> opaque)
      .map { case (key, value) => s"""$key="$value"""" }
      .mkString("Digest ", ",", "")
  }
}

case class DigestClientHeader(realm: String, nonce: String, opaque: String, username: String, response: String)
    extends DigestHeader

object DigestClientHeader {
  private val mandatoryKeys = Set("realm", "nonce", "opaque", "username", "response")

  def fromMap(map: Map[String, String]) = {
    require(mandatoryKeys.forall(map.keySet), "Missing one or more mandatory keys")
    DigestClientHeader(map("realm"), map("nonce"), map("opaque"), map("username"), map("response"))
  }
}

object DigestHeaderUtils {
  def fromClientHeaderString(s: String) = {
    Try(DigestClientHeader.fromMap(DigestHeaderParser.parseHeader(s))) match {
      case Success(dch) => dch
      case _ =>
        throw new IllegalArgumentException(s"$s is not a valid Digest Client Header")
    }
  }
}

object DigestHeaderParser extends RegexParsers {
  private def keyParser: Parser[String] = "[a-zA-Z0-9\"?&/_-]+".r ^^ { _.toString.replace("\"", "") }
  private def valueParser: Parser[String] = "[a-zA-Z0-9\"?=&/_-]+".r ^^ { _.toString.replace("\"", "") }
  private def keyValueParser: Parser[(String, String)] = keyParser ~ "=" ~ valueParser ^^ { case k ~ _ ~ v => k -> v }
  private def digestHeaderParser: Parser[Map[String, String]] = "Digest " ~> repsep(keyValueParser, ",\\s?".r) ^^ {
    _.toMap
  }

  def parseHeader(headerValue: String): Map[String, String] = parse(digestHeaderParser, headerValue) match {
    case Success(map, _) => map
    case _               => Map.empty[String, String]
  }
} 
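
A hypothetical server header parsed back into a map:

DigestHeaderParser.parseHeader("Digest realm=\"cmwell\",nonce=\"YWJj\",opaque=\"12dd\"")
// => Map("realm" -> "cmwell", "nonce" -> "YWJj", "opaque" -> "12dd")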
Example 11
Source File: TlcConfigLexer.scala    From apalache   with Apache License 2.0
package at.forsyte.apalache.io.tlc.config

import java.io.Reader

import scala.util.matching.Regex
import scala.util.parsing.combinator.RegexParsers


object TlcConfigLexer extends RegexParsers {
  // Enclosing object declaration (elided in the original listing). '\n' is kept out
  // of whiteSpace so that it is handled by the dedicated `linefeed` parser below.
  override val whiteSpace: Regex = "[ \t\r\f]+".r

  def apply(reader: Reader): List[TlcConfigToken] = parseAll(program, reader) match {
    case Success(result, _) => result
    case NoSuccess(msg, next) => throw new TlcConfigParseError(msg, next.pos)
  }

  def program: Parser[List[TlcConfigToken]] = skip ~> rep(token <~ skip) <~ eof

  def eof: Parser[String] = "\\z".r | failure("unexpected character")

  def token: Parser[TlcConfigToken] =
    positioned(
      constant | init | next | specification | invariant | property | constraint | actionConstraint |
        symmetry | leftArrow | eq | identifier
    )

  // it is important that linefeed is not a whiteSpace, as otherwise singleComment consumes the whole input!
  def skip: Parser[Unit] = rep(whiteSpace | singleComment | multiComment | linefeed) ^^^ (())

  def linefeed: Parser[Unit] = "\n" ^^^ (())

  def singleComment: Parser[Unit] = "\\*" ~ rep(not("\n") ~ ".".r) ^^^ (())

  def multiComment: Parser[Unit] = "(*" ~ rep(not("*)") ~ "(?s).".r) ~ "*)" ^^^ (())

  private def identifier: Parser[IDENT] = {
    "[a-zA-Z_][a-zA-Z0-9_]*".r ^^ { name => IDENT(name) }
  }

  private def constant: Parser[CONST] = {
    "CONSTANT(S|)".r  ^^ (_ => CONST())
  }

  private def init: Parser[INIT] = {
    "INIT"  ^^ (_ => INIT())
  }

  private def next: Parser[NEXT] = {
    "NEXT"  ^^ (_ => NEXT())
  }

  private def specification: Parser[SPECIFICATION] = {
    "SPECIFICATION" ^^ (_ => SPECIFICATION())
  }

  private def invariant: Parser[INVARIANT] = {
    "INVARIANT(S|)".r ^^ (_ => INVARIANT())
  }

  private def property: Parser[PROPERTY] = {
    "PROPERT(Y|IES)".r ^^ (_ => PROPERTY())
  }

  private def constraint: Parser[CONSTRAINT] = {
    "CONSTRAINT(S|)".r ^^ (_ => CONSTRAINT())
  }

  private def actionConstraint: Parser[ACTION_CONSTRAINT] = {
    "ACTION_CONSTRAINT(S|)".r ^^ (_ => ACTION_CONSTRAINT())
  }

  private def symmetry: Parser[SYMMETRY] = {
    "SYMMETRY".r ^^ (_ => SYMMETRY())
  }

  private def leftArrow: Parser[LEFT_ARROW] = {
    "<-" ^^ (_ => LEFT_ARROW())
  }

  private def eq: Parser[EQ] = {
    "=" ^^ (_ => EQ())
  }
} 
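
A hypothetical run over a small TLC config fragment (the token case classes come from the surrounding package):

import java.io.StringReader
TlcConfigLexer(new StringReader("INIT Init\nNEXT Next\n"))
// => List(INIT(), IDENT("Init"), NEXT(), IDENT("Next"))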
Example 12
Source File: PTestFileParser.scala    From HANAVora-Extensions   with Apache License 2.0
package com.sap.spark.util

import scala.util.Try
import scala.util.parsing.combinator.RegexParsers


class PTestFileParser extends RegexParsers {

  def query: Parser[String] = """\$query:""".r ^^ { _.toString }
  def parsed: Parser[String] = """\$parsed:""".r ^^ { _.toString }
  def expect: Parser[String] = """\$expect:""".r ^^ { _.toString }
  def content: Parser[String] = """[^\$]*""".r ^^ { _.toString }

  def ptest: Parser[(String, String, String)] =
    (query ~> content) ~ (parsed ~> content).? ~ (expect ~> content) ^^ {
      case q ~ p ~ e =>
        (q.toString.trim, p.getOrElse("").trim, e.toString.trim)
    }

  def root: Parser[List[(String, String, String)]] =
    rep(ptest)

  def apply(input: String): Try[List[(String, String, String)]] =
    parseAll(root, input) match {
      case Success(result, _) => scala.util.Success(result)
      case Error(msg, _) => scala.util.Failure(new Exception("Could not parse PTest file " + msg))
      case Failure(msg, _) => scala.util.Failure(new Exception("Could not parse PTest file " + msg))
  }
} 
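
A hypothetical test file with the optional $parsed section omitted:

val parser = new PTestFileParser
parser("$query: SELECT 1 $expect: 1")
// => scala.util.Success(List(("SELECT 1", "", "1")))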
Example 13
Source File: Version.scala    From sbt-docker-compose   with BSD 3-Clause "New" or "Revised" License
package com.tapad.docker

import scala.util.parsing.combinator.RegexParsers

case class Version(major: Int, minor: Int, release: Int)

object Version extends RegexParsers {
  def apply(version: String): Version = {
    parseVersion(version)
  }

  def parseVersion(version: String): Version = {
    parse(parser, version) match {
      case Success(ver, _) => ver
      case NoSuccess(msg, _) => throw new RuntimeException(s"Could not parse Version from $version: $msg")
    }
  }

  private val positiveWholeNumber: Parser[Int] = {
    // The original regex "[1-9]?\d*" could match an empty string; require at least one digit.
    ("0".r | """[1-9]\d*""".r).map(_.toInt).withFailureMessage("non-negative integer value expected")
  }

  private val parser: Parser[Version] = {
    positiveWholeNumber ~ ("." ~> positiveWholeNumber) ~ ("." ~> positiveWholeNumber) ^^ {
      case major ~ minor ~ release => Version(major, minor, release)
    }
  }
} 
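
Typical use on a docker-compose version string:

Version("1.29.2")  // => Version(1, 29, 2)
Version("latest")  // throws RuntimeException("Could not parse Version from latest: ...")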
Example 14
Source File: MongoClientUriParser.scala    From tepkin   with Apache License 2.0
package net.fehmicansaglam.tepkin

import java.net.InetSocketAddress

import scala.util.parsing.combinator.RegexParsers

object MongoClientUriParser extends RegexParsers {
  def credential: Parser[String] = """[^:@?]+""".r ^^ {
    _.toString
  }

  def hostName: Parser[String] = """[^:,?/]+""".r ^^ {
    _.toString
  }

  def port: Parser[Int] = """[0-9]+""".r ^^ {
    _.toInt
  }

  def database: Parser[String] = """[^?]+""".r ^^ {
    _.toString
  }

  def option: Parser[(String, String)] = """[^=]+""".r ~ "=" ~ """[^&]+""".r ^^ {
    case key ~ _ ~ value => key -> value
  }

  def options: Parser[Map[String, String]] = option ~ rep("&" ~ option) ^^ {
    case head ~ tail => (head +: tail.map(_._2)).toMap
  }

  def credentials: Parser[MongoCredentials] = credential ~ opt(":" ~ credential) ^^ {
    case username ~ None =>
      MongoCredentials(username = username)
    case username ~ Some(":" ~ password) =>
      MongoCredentials(username = username, password = Some(password))
  }

  def host: Parser[InetSocketAddress] = hostName ~ opt(":" ~ port) ^^ {
    case hostName ~ None => new InetSocketAddress(hostName, 27017)
    case hostName ~ Some(":" ~ port) => new InetSocketAddress(hostName, port)
  }

  def uri: Parser[MongoClientUri] = {
    "mongodb://" ~ opt(credentials ~ "@") ~ host ~ rep("," ~ host) ~ opt("/" ~ opt(database) ~ opt("?" ~ options)) ^^ {
      case _ ~ credentials ~ host ~ hosts ~ None =>
        MongoClientUri(
          credentials = credentials.map(_._1),
          hosts = hosts.map(_._2).toSet + host
        )

      case _ ~ credentials ~ host ~ hosts ~ Some(_ ~ database ~ options) =>
        MongoClientUri(
          credentials = credentials.map(_._1),
          hosts = hosts.map(_._2).toSet + host,
          database = database,
          options = options.map(_._2).getOrElse(Map.empty)
        )
    }
  }
} 
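
A hypothetical URI exercising credentials, several hosts, a database, and options (MongoClientUri and MongoCredentials are defined elsewhere in tepkin):

MongoClientUriParser.parse(
  MongoClientUriParser.uri,
  "mongodb://user:secret@db1.example.net:27017,db2.example.net/orders?readPreference=primary")
// => Success with two hosts, database Some("orders") and options Map("readPreference" -> "primary")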
Example 15
Source File: BaseParser.scala    From redshift-fake-driver   with Apache License 2.0
package jp.ne.opt.redshiftfake.parse

import jp.ne.opt.redshiftfake.{Credentials, Global}
import jp.ne.opt.redshiftfake.s3.S3Location

import scala.util.parsing.combinator.RegexParsers

trait BaseParser extends RegexParsers {
  val identifier = """[_a-zA-Z]\w*"""

  val dataTypeIdentifier = """[_a-zA-Z]\w*(( )?\([0-9]+\))?"""
  val quotedIdentifier = s"""(?i)($identifier|"$identifier")"""
  val quotedIdentifierParser = "\"".? ~> identifier.r <~ "\"".? ^^ {
    _.replaceAll("\"", "")
  }

  val space = """\s*"""

  val any = """(.|\s)"""

  val s3LocationParser = Global.s3Scheme ~> """[\w-]+""".r ~ ("/" ~> """[\w/:%#$&?()~.=+-]+""".r).? ^^ {
    case ~(bucket, prefix) => S3Location(bucket, prefix.getOrElse(""))
  }

  val awsAuthArgsParser = {
    def parserWithKey = """[\w_]+=""".r ~ """[\w/+=:-]+""".r ~ (";aws_secret_access_key=" ~> """[\w/+=]+""".r).? ^^ {
      case "aws_access_key_id=" ~ accessKeyId ~ Some(secretAccessKey) => Credentials.WithKey(accessKeyId, secretAccessKey)
      case "aws_role_arn=" ~ awsIamRole ~ None => Credentials.WithRole(awsIamRole)
    }

    "'" ~> parserWithKey <~ "'"
  }

  val awsAuthTemporaryParser = {
    "(?i)ACCESS_KEY_ID".r ~ "'" ~ """[\w/+=]+""".r ~ "'" ~ "(?i)SECRET_ACCESS_KEY".r ~ "'" ~ """[\w/+=]+""".r ~ "'" ~ ("(?i)SESSION_TOKEN".r | "(?i)TOKEN".r) ~ "'" ~ """[\w/+=]+""".r ~ "'" ^^ {
      case _ ~ _ ~ accessKeyId ~ _ ~ _ ~ _ ~ secretAccessKey ~ _ ~ _ ~ _ ~ sessionToken ~ _ =>
        Credentials.WithTemporaryToken(
          accessKeyId,
          secretAccessKey,
          sessionToken
        )
    }
  }

  val delimiterParser = s"$any*(?i)DELIMITER".r ~> "(?i)AS".r.? ~> "'" ~> """[|,]""".r <~ "'" <~ s"$any*".r ^^ { s => s.head }
} 
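
A hypothetical check of the S3 location parser, assuming Global.s3Scheme is the "s3://" prefix:

object CopyParser extends BaseParser
CopyParser.parseAll(CopyParser.s3LocationParser, "s3://my-bucket/path/to/data")
// => Success(S3Location("my-bucket", "path/to/data"), ...)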
Example 16
Source File: CarbonMetastoreTypes.scala    From carbondata   with Apache License 2.0
package org.apache.spark.sql.util

import scala.util.parsing.combinator.RegexParsers

import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.types._

object CarbonMetastoreTypes extends RegexParsers {
  protected lazy val primitiveType: Parser[DataType] =
    "string" ^^^ StringType |
    "varchar" ^^^ StringType |
    "float" ^^^ FloatType |
    "int" ^^^ IntegerType |
    "tinyint" ^^^ ShortType |
    "short" ^^^ ShortType |
    "double" ^^^ DoubleType |
    "long" ^^^ LongType |
    "binary" ^^^ BinaryType |
    "boolean" ^^^ BooleanType |
    fixedDecimalType |
    "decimal" ^^^ "decimal" ^^^ DecimalType(10, 0) |
    "varchar\\((\\d+)\\)".r ^^^ StringType |
    "date" ^^^ DateType |
    "timestamp" ^^^ TimestampType

  protected lazy val fixedDecimalType: Parser[DataType] =
    "decimal" ~> "(" ~> "^[1-9]\\d*".r ~ ("," ~> "^[0-9]\\d*".r <~ ")") ^^ {
      case precision ~ scale =>
        DecimalType(precision.toInt, scale.toInt)
    }

  protected lazy val arrayType: Parser[DataType] =
    "array" ~> "<" ~> dataType <~ ">" ^^ {
      case tpe => ArrayType(tpe)
    }

  protected lazy val mapType: Parser[DataType] =
    "map" ~> "<" ~> dataType ~ "," ~ dataType <~ ">" ^^ {
      case t1 ~ _ ~ t2 => MapType(t1, t2)
    }

  protected lazy val structField: Parser[StructField] =
    "[a-zA-Z0-9_]*".r ~ ":" ~ dataType ^^ {
      case name ~ _ ~ tpe => StructField(name, tpe, nullable = true)
    }

  protected lazy val structType: Parser[DataType] =
    "struct" ~> "<" ~> repsep(structField, ",") <~ ">" ^^ {
      case fields => StructType(fields)
    }

  protected lazy val dataType: Parser[DataType] =
    arrayType |
    mapType |
    structType |
    primitiveType

  def toDataType(metastoreType: String): DataType = {
    parseAll(dataType, metastoreType) match {
      case Success(result, _) => result
      case _: NoSuccess =>
        throw new AnalysisException(s"Unsupported dataType: $metastoreType")
    }
  }

  def toMetastoreType(dt: DataType): String = {
    dt match {
      case ArrayType(elementType, _) => s"array<${ toMetastoreType(elementType) }>"
      case StructType(fields) =>
        s"struct<${
          fields.map(f => s"${ f.name }:${ toMetastoreType(f.dataType) }")
            .mkString(",")
        }>"
      case MapType(keyType, valueType, _) =>
        s"map<${ toMetastoreType(keyType) }, ${ toMetastoreType(valueType) }>"
      case StringType => "string"
      case FloatType => "float"
      case IntegerType => "int"
      case ShortType => "tinyint"
      case DoubleType => "double"
      case LongType => "bigint"
      case BinaryType => "binary"
      case BooleanType => "boolean"
      case DecimalType() => "decimal"
      case TimestampType => "timestamp"
      case DateType => "date"
    }
  }
} 
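
Hypothetical conversions in both directions:

CarbonMetastoreTypes.toDataType("struct<name:string,scores:array<double>>")
// => StructType(StructField("name", StringType), StructField("scores", ArrayType(DoubleType)))
CarbonMetastoreTypes.toMetastoreType(MapType(StringType, LongType))
// => "map<string, bigint>"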
Example 17
Source File: LogParser.scala    From temperature-machine   with Apache License 2.0
package bad.robot.temperature

import java.time.Instant
import java.time.format.DateTimeFormatterBuilder

import scala.util.parsing.combinator.RegexParsers
import scalaz.{-\/, \/-}

object LogParser extends RegexParsers {

  implicit class ParserResultOps(result: LogParser.ParseResult[LogMessage]) {
    def toDisjunction() = result match {
      case Success(log, _)  => \/-(log)
      case error: NoSuccess => -\/(ParseError(error.toString))
    }
  }
  
  private val utcFormatter = new DateTimeFormatterBuilder().appendPattern("yyyy-MM-dd HH:mm:ss:SSSZ").toFormatter

  private def digits2: Parser[Int] = """\d{2}""".r ^^ { _.toInt }
  private def digits3: Parser[Int] = """\d{3}""".r ^^ { _.toInt }
  private def digits4: Parser[Int] = """\d{4}""".r ^^ { _.toInt }
  private def thread: Parser[String] = """\[(.*?)\]""".r
  private def level: Parser[String] = """INFO|WARN|ERROR|TRACE|DEBUG|CONFIG""".r
  private def words: Parser[String] = """(?s:.*)""".r
  
  def instant: Parser[Instant] = {
    val constituents = digits4 ~ ("-" ~> digits2) ~ ("-" ~> digits2) ~ digits2 ~ (":" ~> digits2) ~ (":" ~> digits2) ~ (":" ~> digits3) ~ ("+" ~> digits4)

    constituents ^^ {
      case year ~ month ~ day ~ hour ~ mins ~ secs ~ millis ~ offset => {
        val string = f"$year-$month%02d-$day%02d $hour%02d:$mins%02d:$secs%02d:$millis%03d+$offset%04d"
        Instant.from(utcFormatter.parse(string))
      }
    }
  }

  def log = {
    instant ~ thread ~ level ~ words ^^ {
      case time ~ thread ~ level ~ message => LogMessage(time, thread, level, message)
    }
  }
} 
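
A hypothetical log line in the expected "yyyy-MM-dd HH:mm:ss:SSS+offset" layout:

import LogParser._
parseAll(log, "2017-06-01 10:15:00:123+0000 [main] INFO Starting temperature-machine").toDisjunction()
// => \/-(LogMessage(...)) on success, -\/(ParseError(...)) otherwise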
Example 18
Source File: DataType.scala    From airframe   with Apache License 2.0
package wvlet.airframe.sql.catalog
import wvlet.log.LogSupport
import wvlet.airframe.sql.catalog.DataType.{ArrayType, DecimalType}

import scala.util.parsing.combinator.RegexParsers

abstract class DataType(val typeName: String) {
  def baseTypeName: String      = typeName
  override def toString: String = typeName
}

case class NamedType(name: String, dataType: DataType) {
  def typeName: String = s"${name}:${dataType}"
}

object DataType extends LogSupport {
  case object UnknownType extends DataType("?")
  case object AnyType     extends DataType("any")
  case object NullType    extends DataType("null")
  case object BooleanType extends DataType("boolean")
  case object StringType  extends DataType("string")
  case object LongType    extends DataType("long")
  case object DoubleType  extends DataType("double")
  case class DecimalType(precision: Int, scale: Int) extends DataType(s"decimal(${precision},${scale})") {
    override def baseTypeName: String = "decimal"
  }
  case object JsonType                     extends DataType("json")
  case object BinaryType                   extends DataType("binary")
  case object TimestampType                extends DataType("timestamp")
  case class ArrayType(elemType: DataType) extends DataType(s"array[${elemType.typeName}]")
  case class MapType(keyType: DataType, valueType: DataType)
      extends DataType(s"map[${keyType.typeName},${valueType.typeName}]")
  case class RecordType(elems: Seq[NamedType]) extends DataType(s"{${elems.map(_.typeName).mkString(",")}}")

  def primitiveTypeOf(dataType: String): DataType = {
    dataType match {
      case "?"                                        => UnknownType
      case "any"                                      => AnyType
      case "null"                                     => NullType
      case "string"                                   => StringType
      case "byte" | "char" | "short" | "int" | "long" => LongType
      case "float" | "double"                         => DoubleType
      case "boolean"                                  => BooleanType
      case "json"                                     => JsonType
      case "binary"                                   => BinaryType
      case "timestamp"                                => TimestampType
      case _ =>
        warn(s"Unknown type: ${dataType}. Using 'any' instead")
        AnyType
    }
  }

  def parse(typeName: String): Option[DataType] = {
    DataTypeParser.parseDataType(typeName)
  }
}

object DataTypeParser extends RegexParsers with LogSupport {
  override def skipWhitespace = true

  private def typeName: Parser[String] = "[a-zA-Z]+".r
  private def number: Parser[Int]      = "[0-9]+".r ^^ { _.toInt }

  private def primitiveType: Parser[DataType] = typeName ^^ { DataType.primitiveTypeOf(_) }
  private def decimalType: Parser[DataType.DecimalType] =
    "decimal" ~ "(" ~ number ~ "," ~ number ~ ")" ^^ {
      case _ ~ _ ~ p ~ _ ~ s ~ _ =>
        DecimalType(p, s)
    }
  private def arrayType: Parser[DataType.ArrayType] =
    "array" ~ "[" ~ dataType ~ "]" ^^ {
      case _ ~ _ ~ x ~ _ => ArrayType(x)
    }
  private def mapType: Parser[DataType.MapType] =
    "map" ~ "[" ~ dataType ~ "," ~ dataType ~ "]" ^^ {
      case _ ~ _ ~ k ~ _ ~ v ~ _ => DataType.MapType(k, v)
    }

  private def namedType: Parser[NamedType] = typeName ~ ":" ~ dataType ^^ { case n ~ _ ~ t => NamedType(n, t) }
  private def recordType: Parser[DataType.RecordType] =
    "{" ~ namedType ~ rep("," ~ namedType) ~ "}" ^^ {
      case _ ~ head ~ tail ~ _ =>
        DataType.RecordType(head +: tail.map(_._2).toSeq)
    }

  def dataType: Parser[DataType] = decimalType | arrayType | mapType | recordType | primitiveType

  def parseDataType(s: String): Option[DataType] = {
    parseAll(dataType, s) match {
      case Success(result, next) => Some(result)
      case Error(msg, next) =>
        warn(msg)
        None
      case Failure(msg, next) =>
        warn(msg)
        None
    }
  }
} 
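
Hypothetical inputs:

DataType.parse("map[string,array[long]]")  // => Some(MapType(StringType, ArrayType(LongType)))
DataType.parse("decimal(18,4)")            // => Some(DecimalType(18,4))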
Example 19
Source File: CommandLineTokenizer.scala    From airframe   with Apache License 2.0
package wvlet.airframe.control
import wvlet.log.LogSupport

import scala.util.parsing.combinator.RegexParsers


object CommandLineTokenizer extends RegexParsers with LogSupport {
  private def unquote(s: String): String = s.substring(1, s.length() - 1)

  def stringLiteral: Parser[String] =
    ("\"" + """([^"\p{Cntrl}\\]|\\[\\/\\"bfnrt]|\\u[a-fA-F0-9]{4})*""" + "\"").r ^^ { unquote(_) }
  def quotation: Parser[String] =
    ("'" + """([^'\p{Cntrl}\\]|\\[\\/\\"bfnrt]|\\u[a-fA-F0-9]{4})*""" + "'").r ^^ { unquote(_) }
  def other: Parser[String]        = """([^\"'\s]+)""".r
  def token: Parser[String]        = stringLiteral | quotation | other
  def tokens: Parser[List[String]] = rep(token)

  def tokenize(line: String): Array[String] = {
    val p = parseAll(tokens, line)
    val r = p match {
      case Success(result, next) => result
      case Error(msg, next) => {
        warn(msg)
        List.empty
      }
      case Failure(msg, next) => {
        warn(msg)
        List.empty
      }
    }
    r.toArray
  }
} 
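
A hypothetical command line mixing the three token forms:

CommandLineTokenizer.tokenize("run --message \"hello world\" 'single quoted' plain")
// => Array("run", "--message", "hello world", "single quoted", "plain")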
Example 20
Source File: Arrays.scala    From finagle-postgres   with Apache License 2.0
package com.twitter.finagle.postgres.values

import scala.collection.immutable.Queue
import scala.util.parsing.combinator.RegexParsers

import com.twitter.util.{Return, Throw, Try}
import io.netty.buffer.ByteBuf
object Arrays {

  object ArrayStringParser extends RegexParsers {

    val value = """([^",}]|\")*""".r | """"([^"]|\")*"""".r
    val valueComma = "," ~ value ^^ { case "," ~ v => v }
    val values = (value ~ valueComma.*) ^^ { case first ~ rest => first :: rest } | value.? ^^ (_.toList)
    val array = "{" ~ values ~ "}" ^^ { case _ ~ vs ~ _ => vs }
    val maybeArrayValue = array | value ^^ (List(_))
    val maybeArrayValueComma = ("," ~ maybeArrayValue) ^^ { case _ ~ v => v}
    val maybeArrayValues =
      (maybeArrayValue ~ maybeArrayValueComma.*) ^^ { case first ~ rest => first ::: rest.flatten } |
        maybeArrayValue.* ^^ (_.flatten)
    val root = "{" ~ maybeArrayValues ~ "}" ^^ {
      case _ ~ vs ~ _ => vs
    }

    def apply(str: String) = parseAll(root, str) match {
      case Success(strings, _) => Return(strings)
      case Failure(_, _) | Error(_, _) => Throw(new Exception("Failed to parse array string"))
    }

  }

  // TODO: this isn't used anywhere, but it would need access to the type map and it would need to receive the elemoid
  def decodeArrayText[T](str: String, elemDecoder: ValueDecoder[T]) = {
    ArrayStringParser(str).flatMap {
      strings => strings.map(str => elemDecoder.decodeText("", str)).foldLeft[Try[Queue[T]]](Return(Queue.empty[T])) {
        (accum, next) => accum.flatMap {
          current => next.map(v => current enqueue v)
        }
      }
    }
  }

  def decodeArrayBinary[T](buf: ByteBuf, elemDecoder: ValueDecoder[T]) = {
    val ndims = buf.readInt()
    val flags = buf.readInt()
    val elemOid = buf.readInt()
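    // NOTE: incomplete in the original source: the array header (dimensions, flags,
    // element OID) is read, but the elements themselves are never decoded.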
  }

} 
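
A hypothetical one-dimensional array literal; elements come back as raw strings for a ValueDecoder to interpret:

Arrays.ArrayStringParser("{1,2,3}")
// => Return(List("1", "2", "3"))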
Example 21
Source File: HStores.scala    From finagle-postgres   with Apache License 2.0
package com.twitter.finagle.postgres.values

import java.nio.charset.Charset

import scala.util.parsing.combinator.RegexParsers

import io.netty.buffer.{ByteBuf, Unpooled}

object HStores {
  object HStoreStringParser extends RegexParsers {
    def key:Parser[String] = "\"" ~ """([^"\\]*(\\.[^"\\]*)*)""".r ~ "\"" ^^ {
      case o~value~c => value.replace("\\\"", "\"").replace("\\\\", "\\")
    }

    def value = key | "NULL"

    def item:Parser[(String, Option[String])] = key ~ "=>" ~ value ^^ {
      case key~arrow~"NULL" => (key, None)
      case key~arrow~value => (key, Some(value))
    }

    def items:Parser[Map[String, Option[String]]] = repsep(item, ", ") ^^ { l => l.toMap }

    def apply(input:String):Option[Map[String, Option[String]]] = parseAll(items, input) match {
      case Success(result, _) => Some(result)
      case failure:NoSuccess => None
    }
  }

  def parseHStoreString(str: String) = HStoreStringParser(str)

  def formatHStoreString(hstore: Map[String, Option[String]]) = hstore.map {
    case (k, v) =>
      val key = s""""${k.replace("\"", "\\\"")}""""
      val value = v.map(str => s""""${str.replace("\"", "\\\"")}"""").getOrElse("NULL")
      s"""$key => $value"""
  }.mkString(",")

  def decodeHStoreBinary(buf: ByteBuf, charset: Charset) = {
    val count = buf.readInt()
    val pairs = Array.fill(count) {
      val keyLength = buf.readInt()
      val key = Array.fill(keyLength)(buf.readByte())
      val valueLength = buf.readInt()
      val value = valueLength match {
        case -1 => None
        case l =>
          val valueBytes = Array.fill(l)(buf.readByte())
          Some(valueBytes)
      }
      new String(key, charset) -> value.map(new String(_, charset))
    }
    pairs.toMap
  }

  def encodeHStoreBinary(hstore: Map[String, Option[String]], charset: Charset) = {
    val buf = Unpooled.buffer()
    buf.writeInt(hstore.size)
    hstore foreach {
      case (key, value) =>
        val keyBytes = key.getBytes(charset)
        buf.writeInt(keyBytes.length)
        buf.writeBytes(keyBytes)
        value match {
          case None => buf.writeInt(-1)
          case Some(v) =>
            val valueBytes = v.getBytes(charset)
            buf.writeInt(valueBytes.length)
            buf.writeBytes(valueBytes)
        }
    }
    buf
  }

}
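
Hypothetical calls (note that formatHStoreString joins pairs with "," while the parser expects ", "):

HStores.parseHStoreString("\"a\"=>\"1\", \"b\"=>NULL")
// => Some(Map("a" -> Some("1"), "b" -> None))
HStores.formatHStoreString(Map("a" -> Some("1"), "b" -> None))
// => "a" => "1","b" => NULL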