com.google.api.client.json.jackson2.JacksonFactory Scala Examples
The following examples show how to use com.google.api.client.json.jackson2.JacksonFactory.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
Example 1
Source File: GoogleAuthentication.scala From amadou with Apache License 2.0 | 5 votes |
package com.mediative.amadou.bigquery

import java.io.{File, FileReader}

// Explicit converters instead of the deprecated implicit JavaConversions,
// which silently converted Scala collections at Java call sites.
import scala.collection.JavaConverters._

import com.google.api.client.extensions.java6.auth.oauth2.AuthorizationCodeInstalledApp
import com.google.api.client.extensions.jetty.auth.oauth2.LocalServerReceiver
import com.google.api.client.googleapis.auth.oauth2.{
  GoogleAuthorizationCodeFlow,
  GoogleClientSecrets
}
import com.google.api.client.http.{HttpRequest, HttpRequestInitializer}
import com.google.api.client.http.javanet.NetHttpTransport
import com.google.api.client.json.jackson2.JacksonFactory
import com.google.api.client.util.store.FileDataStoreFactory
import org.apache.spark.sql.SparkSession

/**
 * A supported Google OAuth2 authentication flow, carrying the API scopes it
 * requests. Concrete flows are declared as case objects in the companion.
 */
sealed abstract class GoogleAuthentication(val scopes: String*)

object GoogleAuthentication {
  // Shared, lazily-created transport and JSON factory (safe to reuse across requests).
  lazy val HTTP_TRANSPORT = new NetHttpTransport()
  lazy val JSON_FACTORY   = new JacksonFactory()

  /** DoubleClick Bid Manager scope. */
  case object Dbm
      extends GoogleAuthentication("https://www.googleapis.com/auth/doubleclickbidmanager")

  /**
   * Builds an [[HttpRequestInitializer]] for the given authentication flow.
   *
   * Reads the OAuth client-secrets file path from the Spark configuration key
   * `spark.google.cloud.auth.client.file`, runs the installed-app authorization
   * flow (opening a local server receiver to capture the auth code), and wraps
   * the resulting credential with extended timeouts.
   *
   * @throws IllegalArgumentException if the config key is unset or the file is missing
   */
  def apply(auth: GoogleAuthentication, spark: SparkSession): HttpRequestInitializer =
    auth match {
      case Dbm =>
        val clientFilePath = spark.conf.get("spark.google.cloud.auth.client.file")
        require(clientFilePath != null, "'google.cloud.auth.client.file' not configured")

        val clientFile = new File(clientFilePath)
        require(clientFile.exists, s"$clientFilePath does not exist")

        val clientSecrets    = GoogleClientSecrets.load(JSON_FACTORY, new FileReader(clientFile))
        // Tokens are cached next to the client-secrets file.
        val dataStoreFactory = new FileDataStoreFactory(clientFile.getParentFile)

        val flow = new GoogleAuthorizationCodeFlow.Builder(
          HTTP_TRANSPORT,
          JSON_FACTORY,
          clientSecrets,
          auth.scopes.asJava // explicit conversion; was an implicit JavaConversions call
        ).setDataStoreFactory(dataStoreFactory)
          .build()

        val cred = new AuthorizationCodeInstalledApp(flow, new LocalServerReceiver())
          .authorize("user")
        new CustomHttpRequestInitializer(cred)
    }

  /**
   * Delegates to the wrapped initializer, then raises both the connect and
   * read timeouts to 10 minutes (long-running report downloads).
   */
  class CustomHttpRequestInitializer(wrapped: HttpRequestInitializer)
      extends HttpRequestInitializer {
    override def initialize(httpRequest: HttpRequest) = {
      wrapped.initialize(httpRequest)
      httpRequest.setConnectTimeout(10 * 60000) // 10 minutes connect timeout
      httpRequest.setReadTimeout(10 * 60000)    // 10 minutes read timeout
      ()
    }
  }
}
Example 2
Source File: Schemas.scala From ratatool with Apache License 2.0 | 5 votes |
package com.spotify.ratatool

import com.google.api.client.json.JsonObjectParser
import com.google.api.client.json.jackson2.JacksonFactory
import com.google.api.services.bigquery.model.TableSchema
import com.google.common.base.Charsets
import org.apache.avro.Schema

/**
 * Schemas bundled as classpath resources: several Avro schemas plus one
 * BigQuery [[TableSchema]] parsed from JSON.
 */
object Schemas {

  // Parse an Avro schema definition from the named classpath resource.
  // A fresh Parser per call, since Schema.Parser caches named types.
  private def avroFromResource(resource: String): Schema =
    new Schema.Parser().parse(this.getClass.getResourceAsStream(resource))

  val avroSchema: Schema                = avroFromResource("/schema.avsc")
  val simpleAvroSchema: Schema          = avroFromResource("/SimpleRecord.avsc")
  val evolvedSimpleAvroSchema: Schema   = avroFromResource("/EvolvedSimpleRecord.avsc")
  val simpleAvroByteFieldSchema: Schema = avroFromResource("/SimpleByteFieldRecord.avsc")

  // BigQuery table schema, decoded from its JSON representation.
  val tableSchema: TableSchema =
    new JsonObjectParser(new JacksonFactory).parseAndClose(
      this.getClass.getResourceAsStream("/schema.json"),
      Charsets.UTF_8,
      classOf[TableSchema]
    )
}
Example 3
Source File: TableRowDiffy.scala From ratatool with Apache License 2.0 | 5 votes |
package com.spotify.ratatool.diffy

import java.io.StringReader

import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature}
import com.google.api.client.json.JsonObjectParser
import com.google.api.client.json.jackson2.JacksonFactory
import com.google.api.services.bigquery.model.{TableFieldSchema, TableRow, TableSchema}

import scala.jdk.CollectionConverters._
import scala.util.Try

/**
 * Field-level diff of two BigQuery [[TableRow]]s, driven by the table schema.
 *
 * @param tableSchema        schema describing the rows being compared
 * @param ignore             fully-qualified field names to drop from the result
 * @param unordered          REPEATED fields to compare as multisets (sorted) rather
 *                           than positionally
 * @param unorderedFieldKeys for unordered RECORD fields, maps the field's full name
 *                           to the inner key used to pair up elements across rows
 */
class TableRowDiffy(tableSchema: TableSchema,
                    ignore: Set[String] = Set.empty,
                    unordered: Set[String] = Set.empty,
                    unorderedFieldKeys: Map[String, String] = Map())
  extends Diffy[TableRow](ignore, unordered, unorderedFieldKeys) {

  // Entry point: walk the top-level schema fields with an empty root path.
  override def apply(x: TableRow, y: TableRow): Seq[Delta] =
    diff(Option(x), Option(y), schema.getFields.asScala.toList, "")

  // A row (or nested RECORD value) is just a Java map of field name -> value.
  private type Record = java.util.Map[String, AnyRef]

  // TableSchema is not serializable
  // So the schema is carried as a JSON string and re-parsed lazily on each
  // worker; FAIL_ON_EMPTY_BEANS is disabled because the model type has
  // bean-less members that Jackson would otherwise reject.
  private val schemaString: String =
    new ObjectMapper().disable(SerializationFeature.FAIL_ON_EMPTY_BEANS)
      .writeValueAsString(tableSchema)
  private lazy val schema: TableSchema =
    new JsonObjectParser(new JacksonFactory)
      .parseAndClose(new StringReader(schemaString), classOf[TableSchema])

  // scalastyle:off cyclomatic.complexity
  /**
   * Recursively diffs two optional records against a list of schema fields.
   * `root` is the dotted path prefix of the enclosing record ("" at top level).
   */
  private def diff(x: Option[Record], y: Option[Record],
                   fields: Seq[TableFieldSchema], root: String): Seq[Delta] = {
    // Null-safe field access: absent and null both map to None.
    def getField(f: String)(x: Record): Option[AnyRef] = {
      Option(x.get(f))
    }

    fields.flatMap { f =>
      val name = f.getName
      val fullName = if (root.isEmpty) name else root + "." + name
      if (f.getType == "RECORD" && f.getMode != "REPEATED") {
        // Singular nested record: recurse into it, unless one side is missing
        // entirely (reported as a single UnknownDelta for the whole subtree).
        val a = x.flatMap(r => getField(name)(r).map(_.asInstanceOf[Record]))
        val b = y.flatMap(r => getField(name)(r).map(_.asInstanceOf[Record]))
        if (a.isEmpty && b.isEmpty) {
          Nil
        } else if (a.isEmpty || b.isEmpty) {
          Seq(Delta(fullName, a, b, UnknownDelta))
        } else {
          diff(a, b, f.getFields.asScala.toList, fullName)
        }
      } else if (f.getMode == "REPEATED" && unordered.contains(fullName)) {
        if (f.getType == "RECORD" && unorderedFieldKeys.contains(fullName)) {
          // Repeated records compared by key: index each side's elements by the
          // configured inner key (elements whose key lookup fails are skipped),
          // then diff the records paired under each key from either side.
          val l = x
            .flatMap(outer => getField(name)(outer)
              .map(_.asInstanceOf[java.util.List[Record]].asScala.toList))
            .getOrElse(List())
            .flatMap(inner =>
              Try(inner.get(unorderedFieldKeys(fullName))).toOption.map(k => (k, inner))).toMap
          val r = y
            .flatMap(outer => getField(name)(outer)
              .map(_.asInstanceOf[java.util.List[Record]].asScala.toList))
            .getOrElse(List())
            .flatMap(inner =>
              Try(inner.get(unorderedFieldKeys(fullName))).toOption.map(k => (k, inner))).toMap
          (l.keySet ++ r.keySet)
            .flatMap(k => diff(l.get(k), r.get(k), f.getFields.asScala.toList, fullName))
        } else {
          // Repeated scalar (or keyless record) field: sort both sides so the
          // comparison is order-insensitive, then compare wholesale.
          val a = x.flatMap(r => Option(r.get(name).asInstanceOf[java.util.List[AnyRef]]))
            .map(sortList)
          val b = y.flatMap(r => Option(r.get(name).asInstanceOf[java.util.List[AnyRef]]))
            .map(sortList)
          if (a == b) Nil else Seq(Delta(fullName, a, b, delta(a.orNull, b.orNull)))
        }
      } else {
        // Plain field (including ordered REPEATED): direct value comparison.
        val a = x.flatMap(r => getField(name)(r))
        val b = y.flatMap(r => getField(name)(r))
        if (a == b) Nil else Seq(Delta(fullName, a, b, delta(a.orNull, b.orNull)))
      }
    }.filter(d => !ignore.contains(d.field)) // drop ignored fields last, by full name
  }
  // scalastyle:on cyclomatic.complexity
}
Example 4
Source File: BigQueryUtilTest.scala From scio with Apache License 2.0 | 5 votes |
package com.spotify.scio.bigquery

import com.google.api.client.json.jackson2.JacksonFactory
import com.google.api.services.bigquery.model.{TableFieldSchema, TableSchema}
import org.scalatest.matchers.should.Matchers
import org.scalatest.flatspec.AnyFlatSpec

import scala.jdk.CollectionConverters._

/** Round-trip test: a TableSchema serialized to JSON must parse back to itself. */
class BigQueryUtilTest extends AnyFlatSpec with Matchers {
  "parseSchema" should "work" in {
    val schema = new TableSchema().setFields(
      List(
        new TableFieldSchema()
          .setName("f1")
          .setType("INTEGER")
          .setMode("REQUIRED"),
        new TableFieldSchema()
          .setName("f2")
          .setType("FLOAT")
          .setMode("NULLABLE"),
        new TableFieldSchema()
          .setName("f3")
          .setType("TIMESTAMP")
          .setMode("REPEATED"),
        new TableFieldSchema()
          .setName("f4")
          // Fixed: RECORD is a field *type*, not a mode (modes are
          // REQUIRED/NULLABLE/REPEATED); the original called setMode("RECORD").
          .setType("RECORD")
          .setFields(
            List(
              new TableFieldSchema()
                .setName("f5")
                .setType("BOOLEAN")
                .setMode("REQUIRED"),
              new TableFieldSchema()
                .setName("f6")
                .setType("STRING")
                .setMode("NULLABLE"),
              new TableFieldSchema()
                // Fixed: was a duplicate "f6"; sub-field names must be unique.
                .setName("f7")
                .setType("STRING")
                .setMode("REPEATED")
            ).asJava
          )
      ).asJava
    )
    // Attach a JSON factory so schema.toString emits JSON for the round trip.
    schema.setFactory(new JacksonFactory)
    BigQueryUtil.parseSchema(schema.toString) shouldBe schema
  }
}
Example 5
Source File: BeamTypeCoders.scala From scio with Apache License 2.0 | 5 votes |
package com.spotify.scio.coders.instances

import com.google.api.client.json.GenericJson
import com.google.api.client.json.jackson2.JacksonFactory
import com.google.api.client.json.JsonObjectParser
import com.google.api.services.bigquery.model.TableRow
import com.spotify.scio.coders.Coder
import com.spotify.scio.util.ScioUtil
import java.io.StringReader
import org.apache.beam.sdk.coders.RowCoder
import org.apache.beam.sdk.io.FileIO.ReadableFile
import org.apache.beam.sdk.io.fs.{MatchResult, MetadataCoderV2}
import org.apache.beam.sdk.io.gcp.bigquery.TableRowJsonCoder
import org.apache.beam.sdk.io.gcp.pubsub.{PubsubMessage, PubsubMessageWithAttributesCoder}
import org.apache.beam.sdk.io.ReadableFileCoder
import org.apache.beam.sdk.schemas.{Schema => BSchema}
import org.apache.beam.sdk.transforms.windowing.{BoundedWindow, IntervalWindow, PaneInfo}
import org.apache.beam.sdk.values.{KV, Row}
import scala.reflect.ClassTag

/** Implicit [[Coder]] instances for common Beam SDK types. */
trait BeamTypeCoders {
  import BeamTypeCoders._

  implicit def intervalWindowCoder: Coder[IntervalWindow] =
    Coder.beam(IntervalWindow.getCoder)

  // BoundedWindow is an open hierarchy, so fall back to Kryo.
  implicit def boundedWindowCoder: Coder[BoundedWindow] =
    Coder.kryo[BoundedWindow]

  implicit def paneInfoCoder: Coder[PaneInfo] =
    Coder.beam(PaneInfo.PaneInfoCoder.of())

  implicit def tableRowCoder: Coder[TableRow] =
    Coder.beam(TableRowJsonCoder.of())

  /** Coder for Beam [[Row]]s of the given schema (not implicit: needs the schema). */
  def row(schema: BSchema): Coder[Row] =
    Coder.beam(RowCoder.of(schema))

  implicit def messageCoder: Coder[PubsubMessage] =
    Coder.beam(PubsubMessageWithAttributesCoder.of())

  implicit def beamKVCoder[K: Coder, V: Coder]: Coder[KV[K, V]] =
    Coder.kv(Coder[K], Coder[V])

  implicit def readableFileCoder: Coder[ReadableFile] =
    Coder.beam(new ReadableFileCoder())

  implicit def matchResultMetadataCoder: Coder[MatchResult.Metadata] =
    Coder.beam(MetadataCoderV2.of())

  // GenericJson subtypes round-trip through their JSON string form.
  implicit def genericJsonCoder[T <: GenericJson: ClassTag]: Coder[T] =
    Coder.xmap(Coder[String])(
      str => DefaultJsonObjectParser.parseAndClose(new StringReader(str), ScioUtil.classOf[T]),
      json => DefaultJsonObjectParser.getJsonFactory().toString(json)
    )
}

private[coders] object BeamTypeCoders extends BeamTypeCoders {
  // Shared parser backing the GenericJson coder above.
  private lazy val DefaultJsonObjectParser = new JsonObjectParser(new JacksonFactory)
}
Example 6
Source File: AdWordsAuthHelper.scala From spark-google-adwords with Apache License 2.0 | 5 votes |
package com.crealytics.google.adwords

import com.google.api.client.googleapis.auth.oauth2.GoogleAuthorizationCodeFlow
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential
import com.google.api.client.http.javanet.NetHttpTransport
import com.google.api.client.json.jackson2.JacksonFactory
import com.google.common.collect.Lists

/**
 * Helper for the AdWords OAuth2 installed-application flow: exposes the
 * authorization URL to visit and exchanges the resulting code for a refresh token.
 */
class AdWordsAuthHelper(clientId: String, clientSecret: String) {

  val ADWORDS_API_SCOPE = "https://www.googleapis.com/auth/adwords"
  private val SCOPES = Lists.newArrayList(ADWORDS_API_SCOPE)
  // Out-of-band redirect: the auth code is displayed to the user to copy/paste.
  private val CALLBACK_URL = "urn:ietf:wg:oauth:2.0:oob"

  /** Offline-access flow, so Google issues a refresh token with the first exchange. */
  val authorizationFlow: GoogleAuthorizationCodeFlow =
    new GoogleAuthorizationCodeFlow.Builder(
      new NetHttpTransport(),
      new JacksonFactory(),
      clientId,
      clientSecret,
      SCOPES
    ).setAccessType("offline")
      .build()

  /** URL the user must open in a browser to grant access. */
  val authorizationUrl: String =
    authorizationFlow.newAuthorizationUrl().setRedirectUri(CALLBACK_URL).build()

  /**
   * Exchanges the pasted authorization code for tokens and returns the
   * refresh token for long-lived access.
   */
  def getRefreshToken(authorizationCode: String): String = {
    val tokenResponse = authorizationFlow
      .newTokenRequest(authorizationCode)
      .setRedirectUri(CALLBACK_URL)
      .execute()
    val credential = new GoogleCredential.Builder()
      .setTransport(new NetHttpTransport())
      .setJsonFactory(new JacksonFactory())
      .setClientSecrets(clientId, clientSecret)
      .build()
    credential.setFromTokenResponse(tokenResponse)
    credential.getRefreshToken
  }
}