java.io.NotSerializableException Scala Examples
The following examples show how to use java.io.NotSerializableException.
You can go to the original project or source file by following the links above each example.
Example 1
Source File: QueueInputDStream.scala From drizzle-spark with Apache License 2.0
package org.apache.spark.streaming.dstream

import java.io.{NotSerializableException, ObjectInputStream, ObjectOutputStream}

import scala.collection.mutable.{ArrayBuffer, Queue}
import scala.reflect.ClassTag

import org.apache.spark.rdd.{RDD, UnionRDD}
import org.apache.spark.streaming.{StreamingContext, Time}

private[streaming] class QueueInputDStream[T: ClassTag](
    ssc: StreamingContext,
    val queue: Queue[RDD[T]],
    oneAtATime: Boolean,
    defaultRDD: RDD[T]
  ) extends InputDStream[T](ssc) {

  override def start() { }

  override def stop() { }

  private def readObject(in: ObjectInputStream): Unit = {
    throw new NotSerializableException("queueStream doesn't support checkpointing. " +
      "Please don't use queueStream when checkpointing is enabled.")
  }

  private def writeObject(oos: ObjectOutputStream): Unit = {
    logWarning("queueStream doesn't support checkpointing")
  }

  override def compute(validTime: Time): Option[RDD[T]] = {
    val buffer = new ArrayBuffer[RDD[T]]()
    queue.synchronized {
      if (oneAtATime && queue.nonEmpty) {
        buffer += queue.dequeue()
      } else {
        buffer ++= queue
        queue.clear()
      }
    }
    if (buffer.nonEmpty) {
      if (oneAtATime) {
        Some(buffer.head)
      } else {
        Some(new UnionRDD(context.sc, buffer.toSeq))
      }
    } else if (defaultRDD != null) {
      Some(defaultRDD)
    } else {
      Some(ssc.sparkContext.emptyRDD)
    }
  }
}
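The guard above only fires when the DStream graph goes through Java serialization, which checkpointing triggers. Below is a minimal sketch of how user code runs into it; the master URL, app name, and checkpoint path are illustrative:

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.collection.mutable.Queue

object QueueStreamCheckpointSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[2]").setAppName("queue-stream-sketch")
    val ssc  = new StreamingContext(conf, Seconds(1))
    // Enabling checkpointing makes the DStream graph subject to Java
    // serialization; QueueInputDStream.writeObject then logs a warning,
    // and recovery from the checkpoint fails in readObject with
    // NotSerializableException.
    ssc.checkpoint("/tmp/checkpoint-sketch") // illustrative path
    val queue  = Queue.empty[RDD[Int]]
    val stream = ssc.queueStream(queue)
    stream.print()
    queue += ssc.sparkContext.makeRDD(1 to 10)
    ssc.start()
    ssc.awaitTerminationOrTimeout(5000)
    ssc.stop()
  }
}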
Example 2
Source File: PersistenceTckSerializer.scala From kafka-journal with MIT License
package akka.persistence.journal

import java.io.NotSerializableException

import akka.persistence.journal.JournalPerfSpec.Cmd
import akka.serialization.SerializerWithStringManifest
import scodec.bits.BitVector
import scodec.{Codec, codecs}

class PersistenceTckSerializer extends SerializerWithStringManifest {
  import PersistenceTckSerializer._

  def identifier = 585506118

  def manifest(a: AnyRef): String = a match {
    case _: Cmd => cmdManifest
    case _      => illegalArgument(s"Cannot serialize message of ${ a.getClass } in ${ getClass.getName }")
  }

  def toBinary(a: AnyRef): Array[Byte] = {
    a match {
      case a: Cmd => cmdCodec.encode(a).require.toByteArray
      case _      => illegalArgument(s"Cannot serialize message of ${ a.getClass } in ${ getClass.getName }")
    }
  }

  def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = {
    manifest match {
      case `cmdManifest` => cmdCodec.decode(BitVector.view(bytes)).require.value
      case _             => notSerializable(s"Cannot deserialize message for manifest $manifest in ${ getClass.getName }")
    }
  }

  private def notSerializable(msg: String) = throw new NotSerializableException(msg)
  private def illegalArgument(msg: String) = throw new IllegalArgumentException(msg)
}

object PersistenceTckSerializer {
  val cmdManifest = "A"

  implicit val cmdCodec: Codec[Cmd] = {
    val codec = codecs.utf8_32 :: codecs.int32
    codec.as[Cmd]
  }
}
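For Akka to route Cmd messages through this serializer, it has to be bound in configuration. A hedged sketch of such a binding; the system name and the `$`-separated name of the nested Cmd class are assumptions:

import akka.actor.ActorSystem
import com.typesafe.config.ConfigFactory

object SerializerBindingSketch {
  // Binds Cmd messages to PersistenceTckSerializer; the exact
  // fully-qualified name of the nested Cmd type is an assumption.
  val config = ConfigFactory.parseString(
    """
      |akka.actor {
      |  serializers {
      |    persistence-tck = "akka.persistence.journal.PersistenceTckSerializer"
      |  }
      |  serialization-bindings {
      |    "akka.persistence.journal.JournalPerfSpec$Cmd" = persistence-tck
      |  }
      |}
    """.stripMargin)

  def main(args: Array[String]): Unit = {
    val system = ActorSystem("tck-sketch", config.withFallback(ConfigFactory.load()))
    system.terminate()
  }
}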
Example 3
Source File: PersistenceMessageSerializerSpec.scala From lagom with Apache License 2.0
package com.lightbend.lagom.internal.javadsl.persistence.protobuf

import java.io.NotSerializableException
import java.time.{ Duration => JDuration }

import akka.actor.ExtendedActorSystem
import akka.serialization.SerializationExtension
import com.lightbend.lagom.internal.cluster.ClusterDistribution.EnsureActive
import com.lightbend.lagom.persistence.ActorSystemSpec
import com.lightbend.lagom.javadsl.persistence.CommandEnvelope
import com.lightbend.lagom.javadsl.persistence.PersistentEntity.InvalidCommandException
import com.lightbend.lagom.javadsl.persistence.PersistentEntity.PersistException
import com.lightbend.lagom.javadsl.persistence.PersistentEntity.UnhandledCommandException
import com.lightbend.lagom.javadsl.persistence.PersistentEntityRef
import com.lightbend.lagom.javadsl.persistence.TestEntity

class PersistenceMessageSerializerSpec extends ActorSystemSpec {
  val serializer = new PersistenceMessageSerializer(system.asInstanceOf[ExtendedActorSystem])

  def checkSerialization(obj: AnyRef): Unit = {
    // check that it is configured
    SerializationExtension(system).serializerFor(obj.getClass).getClass should be(classOf[PersistenceMessageSerializer])

    // verify serialization-deserialization round trip
    val blob = serializer.toBinary(obj)
    val obj2 = serializer.fromBinary(blob, serializer.manifest(obj))
    obj2 should be(obj)
  }

  "PersistenceMessageSerializer" must {
    "serialize CommandEnvelope" in {
      checkSerialization(CommandEnvelope("entityId", TestEntity.Add.of("a")))
    }

    "serialize EnsureActive" in {
      checkSerialization(EnsureActive("foo"))
    }

    "serialize InvalidCommandException" in {
      checkSerialization(InvalidCommandException("wrong"))
    }

    "serialize UnhandledCommandException" in {
      checkSerialization(UnhandledCommandException("unhandled"))
    }

    "serialize PersistException" in {
      checkSerialization(PersistException("not stored"))
    }

    "not serialize PersistentEntityRef" in {
      intercept[NotSerializableException] {
        SerializationExtension(system)
          .serialize(new PersistentEntityRef[String]("abc", system.deadLetters, JDuration.ofSeconds(5)))
          .get
      }
    }
  }
}
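The last test works because SerializationExtension(system).serialize returns a Try[Array[Byte]]; the NotSerializableException only escapes when the Try is unwrapped with .get. The same check can be written without intercept, as in this sketch, where `system` and `ref` stand for the actor system and the PersistentEntityRef built in the spec above:

import akka.actor.ActorSystem
import akka.serialization.SerializationExtension

import scala.util.{Failure, Success}

// Pattern-matching on the Try makes the failure path explicit
// instead of relying on .get to rethrow.
def checkNotSerializable(system: ActorSystem, ref: AnyRef): Unit =
  SerializationExtension(system).serialize(ref) match {
    case Success(bytes) => println(s"unexpectedly serialized ${bytes.length} bytes")
    case Failure(e)     => println(s"rejected as expected: ${e.getClass.getSimpleName}")
  }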
Example 4
Source File: PersistentEntityRef.scala From lagom with Apache License 2.0
package com.lightbend.lagom.scaladsl.persistence

import java.io.NotSerializableException

import scala.concurrent.duration._
import scala.concurrent.Future

import akka.actor.{ ActorRef, ActorSystem, NoSerializationVerificationNeeded }
import akka.pattern.{ ask => akkaAsk }
import akka.util.Timeout

final class PersistentEntityRef[Command](
    val entityId: String,
    region: ActorRef,
    system: ActorSystem,
    askTimeout: FiniteDuration
) extends NoSerializationVerificationNeeded {

  def withAskTimeout(timeout: FiniteDuration): PersistentEntityRef[Command] =
    new PersistentEntityRef(entityId, region, system, askTimeout = timeout)

  // Reasons why we don't support serialization of the PersistentEntityRef:
  // - it will rarely be sent as a message itself, so providing a serializer will not help
  // - it will be embedded in other messages and the only way we could support that
  //   transparently is to implement java serialization (readResolve, writeReplace)
  //   like ActorRef, but we don't want to encourage java serialization anyway
  // - serializing/embedding the entityId String in other messages is simple
  // - there might be issues with the type `Command`
  @throws(classOf[java.io.ObjectStreamException])
  protected def writeReplace(): AnyRef =
    throw new NotSerializableException(s"${getClass.getName} is not serializable. Send the entityId instead.")

  override def toString: String = s"PersistentEntityRef($entityId)"
}
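writeReplace is a standard Java-serialization hook: for a Serializable class, ObjectOutputStream invokes it before writing the instance, so throwing from it vetoes serialization even when the ref is embedded deep inside another message. A self-contained sketch of the same guard, using a hypothetical GuardedRef class:

import java.io.{ByteArrayOutputStream, NotSerializableException, ObjectOutputStream}

// Hypothetical stand-in for PersistentEntityRef.
class GuardedRef(val entityId: String) extends Serializable {
  @throws(classOf[java.io.ObjectStreamException])
  protected def writeReplace(): AnyRef =
    throw new NotSerializableException(s"${getClass.getName} is not serializable. Send the entityId instead.")
}

object WriteReplaceSketch extends App {
  val out = new ObjectOutputStream(new ByteArrayOutputStream())
  // writeObject consults writeReplace first, so the exception is
  // raised before any bytes for GuardedRef are written.
  try out.writeObject(new GuardedRef("abc"))
  catch { case e: NotSerializableException => println(s"vetoed: ${e.getMessage}") }
}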
Example 5
Source File: PersistenceMessageSerializerSpec.scala From lagom with Apache License 2.0
package com.lightbend.lagom.internal.scaladsl.persistence.protobuf

import java.io.NotSerializableException

import scala.concurrent.duration._

import akka.actor.ExtendedActorSystem
import akka.serialization.SerializationExtension
import com.lightbend.lagom.internal.cluster.ClusterDistribution.EnsureActive
import com.lightbend.lagom.persistence.ActorSystemSpec
import com.lightbend.lagom.scaladsl.persistence.CommandEnvelope
import com.lightbend.lagom.scaladsl.persistence.PersistentEntity.InvalidCommandException
import com.lightbend.lagom.scaladsl.persistence.PersistentEntity.PersistException
import com.lightbend.lagom.scaladsl.persistence.PersistentEntity.UnhandledCommandException
import com.lightbend.lagom.scaladsl.persistence.PersistentEntityRef
import com.lightbend.lagom.scaladsl.persistence.TestEntity
import com.lightbend.lagom.scaladsl.persistence.TestEntitySerializerRegistry
import com.lightbend.lagom.scaladsl.playjson.JsonSerializerRegistry

class PersistenceMessageSerializerSpec
    extends ActorSystemSpec(JsonSerializerRegistry.actorSystemSetupFor(TestEntitySerializerRegistry)) {

  val serializer = new PersistenceMessageSerializer(system.asInstanceOf[ExtendedActorSystem])

  def checkSerialization(obj: AnyRef): Unit = {
    // check that it is configured
    SerializationExtension(system).serializerFor(obj.getClass).getClass should be(classOf[PersistenceMessageSerializer])

    // verify serialization-deserialization round trip
    val blob = serializer.toBinary(obj)
    val obj2 = serializer.fromBinary(blob, serializer.manifest(obj))
    obj2 should be(obj)
  }

  "PersistenceMessageSerializer" must {
    "serialize CommandEnvelope" in {
      checkSerialization(CommandEnvelope("entityId", TestEntity.Add("a")))
    }

    "serialize EnsureActive" in {
      checkSerialization(EnsureActive("foo"))
    }

    "serialize InvalidCommandException" in {
      checkSerialization(InvalidCommandException("wrong"))
    }

    "serialize UnhandledCommandException" in {
      checkSerialization(UnhandledCommandException("unhandled"))
    }

    "serialize PersistException" in {
      checkSerialization(PersistException("not stored"))
    }

    "not serialize PersistentEntityRef" in {
      intercept[NotSerializableException] {
        SerializationExtension(system)
          .serialize(new PersistentEntityRef[String]("abc", system.deadLetters, system, 5.seconds))
          .get
      }
    }
  }
}
Example 6
Source File: QueueInputDStream.scala From sparkoscope with Apache License 2.0
package org.apache.spark.streaming.dstream

import java.io.{NotSerializableException, ObjectInputStream, ObjectOutputStream}

import scala.collection.mutable.{ArrayBuffer, Queue}
import scala.reflect.ClassTag

import org.apache.spark.rdd.{RDD, UnionRDD}
import org.apache.spark.streaming.{StreamingContext, Time}

private[streaming] class QueueInputDStream[T: ClassTag](
    ssc: StreamingContext,
    val queue: Queue[RDD[T]],
    oneAtATime: Boolean,
    defaultRDD: RDD[T]
  ) extends InputDStream[T](ssc) {

  override def start() { }

  override def stop() { }

  private def readObject(in: ObjectInputStream): Unit = {
    throw new NotSerializableException("queueStream doesn't support checkpointing. " +
      "Please don't use queueStream when checkpointing is enabled.")
  }

  private def writeObject(oos: ObjectOutputStream): Unit = {
    logWarning("queueStream doesn't support checkpointing")
  }

  override def compute(validTime: Time): Option[RDD[T]] = {
    val buffer = new ArrayBuffer[RDD[T]]()
    queue.synchronized {
      if (oneAtATime && queue.nonEmpty) {
        buffer += queue.dequeue()
      } else {
        buffer ++= queue
        queue.clear()
      }
    }
    if (buffer.nonEmpty) {
      if (oneAtATime) {
        Some(buffer.head)
      } else {
        Some(new UnionRDD(context.sc, buffer.toSeq))
      }
    } else if (defaultRDD != null) {
      Some(defaultRDD)
    } else {
      Some(ssc.sparkContext.emptyRDD)
    }
  }
}
Example 7
Source File: EmittedSerializer.scala From akka-stream-eventsourcing with Apache License 2.0
package com.github.krasserm.ases.serializer

import java.io.NotSerializableException

import akka.actor.ExtendedActorSystem
import akka.serialization.Serializer
import com.github.krasserm.ases.Emitted
import com.github.krasserm.ases.serializer.EmittedFormatOuterClass.EmittedFormat

class EmittedSerializer(system: ExtendedActorSystem) extends Serializer {
  private val EmittedClass = classOf[Emitted[_]]
  private val payloadSerializer = new PayloadSerializer(system)

  override def identifier: Int = 17406883

  override def includeManifest: Boolean = true

  override def toBinary(o: AnyRef): Array[Byte] = o match {
    case emitted: Emitted[_] =>
      emittedFormat(emitted).toByteArray
    case _ =>
      throw new IllegalArgumentException(s"Invalid object of type '${o.getClass}' supplied to serializer [id = '$identifier']")
  }

  override def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = manifest match {
    case Some(`EmittedClass`) =>
      emitted(EmittedFormat.parseFrom(bytes))
    case None =>
      emitted(EmittedFormat.parseFrom(bytes))
    case _ =>
      throw new NotSerializableException(s"Unknown manifest '$manifest' supplied to serializer [id = '$identifier']")
  }

  private def emittedFormat(emitted: Emitted[Any]): EmittedFormat = {
    EmittedFormat.newBuilder()
      .setEvent(payloadSerializer.payloadFormatBuilder(emitted.event.asInstanceOf[AnyRef]))
      .setEmitterId(emitted.emitterId)
      .setEmissionUuid(emitted.emissionUuid)
      .build()
  }

  private def emitted(format: EmittedFormat): Emitted[_] = {
    Emitted(
      payloadSerializer.payload(format.getEvent),
      format.getEmitterId,
      format.getEmissionUuid
    )
  }
}
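Since includeManifest is true, Akka hands fromBinary the class of the original object as the manifest, and the catch-all branch turns anything else into a NotSerializableException. A hedged sketch of a direct round trip, assuming Emitted's three-argument constructor shown above and illustrative payload values:

import akka.actor.{ActorSystem, ExtendedActorSystem}
import com.github.krasserm.ases.Emitted

object EmittedRoundTripSketch extends App {
  val system     = ActorSystem("emitted-sketch") // illustrative name
  val serializer = new EmittedSerializer(system.asInstanceOf[ExtendedActorSystem])
  // Payload and identifiers are illustrative values.
  val emitted    = Emitted("some-event", "emitter-1", "uuid-1")
  val bytes      = serializer.toBinary(emitted)
  // With includeManifest = true, Akka supplies the class as the manifest.
  val restored   = serializer.fromBinary(bytes, Some(classOf[Emitted[_]]))
  assert(restored == emitted)
  system.terminate()
}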
Example 8
Source File: QueueInputDStream.scala From multi-tenancy-spark with Apache License 2.0
package org.apache.spark.streaming.dstream

import java.io.{NotSerializableException, ObjectInputStream, ObjectOutputStream}

import scala.collection.mutable.{ArrayBuffer, Queue}
import scala.reflect.ClassTag

import org.apache.spark.rdd.{RDD, UnionRDD}
import org.apache.spark.streaming.{StreamingContext, Time}

private[streaming] class QueueInputDStream[T: ClassTag](
    ssc: StreamingContext,
    val queue: Queue[RDD[T]],
    oneAtATime: Boolean,
    defaultRDD: RDD[T]
  ) extends InputDStream[T](ssc) {

  override def start() { }

  override def stop() { }

  private def readObject(in: ObjectInputStream): Unit = {
    throw new NotSerializableException("queueStream doesn't support checkpointing. " +
      "Please don't use queueStream when checkpointing is enabled.")
  }

  private def writeObject(oos: ObjectOutputStream): Unit = {
    logWarning("queueStream doesn't support checkpointing")
  }

  override def compute(validTime: Time): Option[RDD[T]] = {
    val buffer = new ArrayBuffer[RDD[T]]()
    queue.synchronized {
      if (oneAtATime && queue.nonEmpty) {
        buffer += queue.dequeue()
      } else {
        buffer ++= queue
        queue.clear()
      }
    }
    if (buffer.nonEmpty) {
      if (oneAtATime) {
        Some(buffer.head)
      } else {
        Some(new UnionRDD(context.sc, buffer.toSeq))
      }
    } else if (defaultRDD != null) {
      Some(defaultRDD)
    } else {
      Some(ssc.sparkContext.emptyRDD)
    }
  }
}
Example 9
Source File: QueueInputDStream.scala From iolap with Apache License 2.0
package org.apache.spark.streaming.dstream

import java.io.{NotSerializableException, ObjectInputStream, ObjectOutputStream}

import scala.collection.mutable.{ArrayBuffer, Queue}
import scala.reflect.ClassTag

import org.apache.spark.rdd.{RDD, UnionRDD}
import org.apache.spark.streaming.{Time, StreamingContext}

private[streaming] class QueueInputDStream[T: ClassTag](
    @transient ssc: StreamingContext,
    val queue: Queue[RDD[T]],
    oneAtATime: Boolean,
    defaultRDD: RDD[T]
  ) extends InputDStream[T](ssc) {

  override def start() { }

  override def stop() { }

  private def readObject(in: ObjectInputStream): Unit = {
    throw new NotSerializableException("queueStream doesn't support checkpointing. " +
      "Please don't use queueStream when checkpointing is enabled.")
  }

  private def writeObject(oos: ObjectOutputStream): Unit = {
    logWarning("queueStream doesn't support checkpointing")
  }

  override def compute(validTime: Time): Option[RDD[T]] = {
    val buffer = new ArrayBuffer[RDD[T]]()
    if (oneAtATime && queue.size > 0) {
      buffer += queue.dequeue()
    } else {
      buffer ++= queue.dequeueAll(_ => true)
    }
    if (buffer.size > 0) {
      if (oneAtATime) {
        Some(buffer.head)
      } else {
        Some(new UnionRDD(ssc.sc, buffer.toSeq))
      }
    } else if (defaultRDD != null) {
      Some(defaultRDD)
    } else {
      None
    }
  }
}
Example 10
Source File: QueueInputDStream.scala From spark1.52 with Apache License 2.0
package org.apache.spark.streaming.dstream

import java.io.{NotSerializableException, ObjectInputStream, ObjectOutputStream}

import scala.collection.mutable.{ArrayBuffer, Queue}
import scala.reflect.ClassTag

import org.apache.spark.rdd.{RDD, UnionRDD}
import org.apache.spark.streaming.{Time, StreamingContext}

private[streaming] class QueueInputDStream[T: ClassTag](
    @transient ssc: StreamingContext,
    val queue: Queue[RDD[T]],
    oneAtATime: Boolean,
    defaultRDD: RDD[T]
  ) extends InputDStream[T](ssc) {

  override def start() { }

  override def stop() { }

  private def readObject(in: ObjectInputStream): Unit = {
    throw new NotSerializableException("queueStream doesn't support checkpointing. " +
      "Please don't use queueStream when checkpointing is enabled.")
  }

  private def writeObject(oos: ObjectOutputStream): Unit = {
    logWarning("queueStream doesn't support checkpointing")
  }

  override def compute(validTime: Time): Option[RDD[T]] = {
    val buffer = new ArrayBuffer[RDD[T]]()
    if (oneAtATime && queue.size > 0) {
      buffer += queue.dequeue()
    } else {
      buffer ++= queue.dequeueAll(_ => true)
    }
    if (buffer.size > 0) {
      if (oneAtATime) {
        Some(buffer.head)
      } else {
        Some(new UnionRDD(ssc.sc, buffer.toSeq))
      }
    } else if (defaultRDD != null) {
      Some(defaultRDD)
    } else {
      None
    }
  }
}
Example 11
Source File: QueueInputDStream.scala From Spark-2.3.1 with Apache License 2.0
package org.apache.spark.streaming.dstream

import java.io.{NotSerializableException, ObjectInputStream, ObjectOutputStream}

import scala.collection.mutable.{ArrayBuffer, Queue}
import scala.reflect.ClassTag

import org.apache.spark.rdd.{RDD, UnionRDD}
import org.apache.spark.streaming.{StreamingContext, Time}

private[streaming] class QueueInputDStream[T: ClassTag](
    ssc: StreamingContext,
    val queue: Queue[RDD[T]],
    oneAtATime: Boolean,
    defaultRDD: RDD[T]
  ) extends InputDStream[T](ssc) {

  override def start() { }

  override def stop() { }

  private def readObject(in: ObjectInputStream): Unit = {
    throw new NotSerializableException("queueStream doesn't support checkpointing. " +
      "Please don't use queueStream when checkpointing is enabled.")
  }

  private def writeObject(oos: ObjectOutputStream): Unit = {
    logWarning("queueStream doesn't support checkpointing")
  }

  override def compute(validTime: Time): Option[RDD[T]] = {
    val buffer = new ArrayBuffer[RDD[T]]()
    queue.synchronized {
      if (oneAtATime && queue.nonEmpty) {
        buffer += queue.dequeue()
      } else {
        buffer ++= queue
        queue.clear()
      }
    }
    if (buffer.nonEmpty) {
      if (oneAtATime) {
        Some(buffer.head)
      } else {
        Some(new UnionRDD(context.sc, buffer.toSeq))
      }
    } else if (defaultRDD != null) {
      Some(defaultRDD)
    } else {
      Some(ssc.sparkContext.emptyRDD)
    }
  }
}
Example 12
Source File: QueueInputDStream.scala From BigDatalog with Apache License 2.0
package org.apache.spark.streaming.dstream

import java.io.{NotSerializableException, ObjectInputStream, ObjectOutputStream}

import scala.collection.mutable.{ArrayBuffer, Queue}
import scala.reflect.ClassTag

import org.apache.spark.rdd.{RDD, UnionRDD}
import org.apache.spark.streaming.{Time, StreamingContext}

private[streaming] class QueueInputDStream[T: ClassTag](
    ssc: StreamingContext,
    val queue: Queue[RDD[T]],
    oneAtATime: Boolean,
    defaultRDD: RDD[T]
  ) extends InputDStream[T](ssc) {

  override def start() { }

  override def stop() { }

  private def readObject(in: ObjectInputStream): Unit = {
    throw new NotSerializableException("queueStream doesn't support checkpointing. " +
      "Please don't use queueStream when checkpointing is enabled.")
  }

  private def writeObject(oos: ObjectOutputStream): Unit = {
    logWarning("queueStream doesn't support checkpointing")
  }

  override def compute(validTime: Time): Option[RDD[T]] = {
    val buffer = new ArrayBuffer[RDD[T]]()
    if (oneAtATime && queue.size > 0) {
      buffer += queue.dequeue()
    } else {
      buffer ++= queue.dequeueAll(_ => true)
    }
    if (buffer.size > 0) {
      if (oneAtATime) {
        Some(buffer.head)
      } else {
        Some(new UnionRDD(context.sc, buffer.toSeq))
      }
    } else if (defaultRDD != null) {
      Some(defaultRDD)
    } else {
      Some(ssc.sparkContext.emptyRDD)
    }
  }
}
Example 13
Source File: UserRepositorySerializer.scala From whirlwind-tour-akka-typed with Apache License 2.0
package de.heikoseeberger.wtat

import akka.actor.ExtendedActorSystem
import akka.actor.typed.{ ActorRef, ActorRefResolver }
import akka.serialization.SerializerWithStringManifest
import de.heikoseeberger.wtat.proto.userrepository.{
  AddUser => AddUserProto,
  RemoveUser => RemoveUserProto,
  UserAdded => UserAddedProto,
  UserRemoved => UserRemovedProto,
  UsernameTaken => UsernameTakenProto,
  UsernameUnknown => UsernameUnknownProto
}
import de.heikoseeberger.wtat.proto.{ User => UserProto }
import java.io.NotSerializableException

final class UserRepositorySerializer(system: ExtendedActorSystem) extends SerializerWithStringManifest {
  import UserRepository._
  import akka.actor.typed.scaladsl.adapter._

  override val identifier = 4243

  private final val AddUserManifest         = "AddUser"
  private final val UsernameTakenManifest   = "UsernameTaken"
  private final val UserAddedManifest       = "UserAdded"
  private final val RemoveUserManifest      = "RemoveUser"
  private final val UsernameUnknownManifest = "UsernameUnknown"
  private final val UserRemovedManifest     = "UserRemoved"

  private val resolver = ActorRefResolver(system.toTyped)

  override def manifest(o: AnyRef) =
    o match {
      case serializable: Serializable =>
        serializable match {
          case _: AddUser         => AddUserManifest
          case _: UsernameTaken   => UsernameTakenManifest
          case _: UserAdded       => UserAddedManifest
          case _: RemoveUser      => RemoveUserManifest
          case _: UsernameUnknown => UsernameUnknownManifest
          case _: UserRemoved     => UserRemovedManifest
        }
      case _ => throw new IllegalArgumentException(s"Unknown class: ${o.getClass}!")
    }

  override def toBinary(o: AnyRef) = {
    def userProto(user: User)      = UserProto(user.username.value, user.nickname.value)
    def toBinary(ref: ActorRef[_]) = resolver.toSerializationFormat(ref)
    val proto =
      o match {
        case serializable: Serializable =>
          serializable match {
            case AddUser(user, replyTo)        => AddUserProto(Some(userProto(user)), toBinary(replyTo))
            case UsernameTaken(username)       => UsernameTakenProto(username)
            case UserAdded(user)               => UserAddedProto(Some(userProto(user)))
            case RemoveUser(username, replyTo) => RemoveUserProto(username, toBinary(replyTo))
            case UsernameUnknown(username)     => UsernameUnknownProto(username)
            case UserRemoved(username)         => UserRemovedProto(username)
          }
        case _ => throw new IllegalArgumentException(s"Unknown class: ${o.getClass}!")
      }
    proto.toByteArray
  }

  override def fromBinary(bytes: Array[Byte], manifest: String) = {
    def addUser(proto: AddUserProto)                 = AddUser(user(proto.user.get), fromBinary(proto.replyTo))
    def usernameTaken(proto: UsernameTakenProto)     = UsernameTaken(proto.username)
    def userAdded(proto: UserAddedProto)             = UserAdded(user(proto.user.get))
    def user(proto: UserProto)                       = User(proto.username, proto.nickname).valueOr(_.fail)
    def removeUser(proto: RemoveUserProto)           = RemoveUser(proto.username, fromBinary(proto.replyTo))
    def usernameUnknown(proto: UsernameUnknownProto) = UsernameUnknown(proto.username)
    def userRemoved(proto: UserRemovedProto)         = UserRemoved(proto.username)
    def fromBinary(ref: String)                      = resolver.resolveActorRef(ref)
    manifest match {
      case AddUserManifest         => addUser(AddUserProto.parseFrom(bytes))
      case UsernameTakenManifest   => usernameTaken(UsernameTakenProto.parseFrom(bytes))
      case UserAddedManifest       => userAdded(UserAddedProto.parseFrom(bytes))
      case RemoveUserManifest      => removeUser(RemoveUserProto.parseFrom(bytes))
      case UsernameUnknownManifest => usernameUnknown(UsernameUnknownProto.parseFrom(bytes))
      case UserRemovedManifest     => userRemoved(UserRemovedProto.parseFrom(bytes))
      case _                       => throw new NotSerializableException(s"Unknown manifest: $manifest!")
    }
  }
}
Example 14
Source File: UserSerializer.scala From whirlwind-tour-akka-typed with Apache License 2.0
package de.heikoseeberger.wtat

import akka.actor.ExtendedActorSystem
import akka.serialization.SerializerWithStringManifest
import de.heikoseeberger.wtat.proto.{ User => UserProto }
import java.io.NotSerializableException

final class UserSerializer(system: ExtendedActorSystem) extends SerializerWithStringManifest {

  override val identifier = 4242

  private final val UserManifest = "User"

  override def manifest(o: AnyRef) =
    o match {
      case _: User => UserManifest
      case _       => throw new IllegalArgumentException(s"Unknown class: ${o.getClass}!")
    }

  override def toBinary(o: AnyRef) =
    o match {
      case User(username, nickname) => UserProto(username.value, nickname.value).toByteArray
      case _                        => throw new IllegalArgumentException(s"Unknown class: ${o.getClass}!")
    }

  override def fromBinary(bytes: Array[Byte], manifest: String) = {
    def user(pb: UserProto) = User(pb.username, pb.nickname)
    manifest match {
      case UserManifest => user(UserProto.parseFrom(bytes))
      case _            => throw new NotSerializableException(s"Unknown manifest: $manifest!")
    }
  }
}
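The catch-all branch in fromBinary can be exercised directly by passing a manifest the serializer does not know; the bytes are never parsed because the match on the manifest fails first. A sketch, with an illustrative system name:

import java.io.NotSerializableException

import akka.actor.{ActorSystem, ExtendedActorSystem}

object UnknownManifestSketch extends App {
  val system     = ActorSystem("user-serializer-sketch")
  val serializer = new UserSerializer(system.asInstanceOf[ExtendedActorSystem])
  // The match on the manifest hits the catch-all before parsing any bytes.
  try serializer.fromBinary(Array.emptyByteArray, "Bogus")
  catch { case e: NotSerializableException => println(s"rejected: ${e.getMessage}") }
  system.terminate()
}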