From ba9fc4da460fcd040d303263e000e6c04b3e13b4 Mon Sep 17 00:00:00 2001 From: Martin Krasser Date: Thu, 7 Nov 2013 10:45:02 +0100 Subject: [PATCH] !per #3707 Channel enhancements - Persistent channel - ConfirmablePersistent message type delivered by channel - Sender resolution performance improvements * unstash() instead of unstashAll() These enhancements required the following changes - Unified implementation of processor stash and user stash - Persistence message plugin API separated from implementation - Physical deletion of messages --- .../src/main/scala/akka/actor/Stash.scala | 98 +- .../docs/persistence/PersistenceDocTest.java | 19 +- .../persistence/PersistencePluginDocTest.java | 8 +- akka-docs/rst/java/persistence.rst | 92 +- .../docs/persistence/PersistenceDocSpec.scala | 15 +- .../PersistencePluginDocSpec.scala | 8 +- akka-docs/rst/scala/persistence.rst | 91 +- .../journal/japi/AsyncReplayPlugin.java | 21 +- .../journal/japi/AsyncWritePlugin.java | 30 +- .../journal/japi/SyncWritePlugin.java | 29 +- .../serialization/MessageFormats.java | 1181 ++++++++++++++--- .../snapshot/japi/SnapshotStorePlugin.java | 16 +- .../src/main/protobuf/MessageFormats.proto | 14 +- .../src/main/resources/reference.conf | 4 +- .../main/scala/akka/persistence/Channel.scala | 240 +++- .../scala/akka/persistence/Eventsourced.scala | 40 +- .../akka/persistence/JournalProtocol.scala | 20 +- .../scala/akka/persistence/Persistent.scala | 269 ++-- .../scala/akka/persistence/Processor.scala | 121 +- .../scala/akka/persistence/Snapshot.scala | 5 +- .../persistence/journal/AsyncReplay.scala | 20 +- .../journal/AsyncWriteJournal.scala | 36 +- .../journal/SyncWriteJournal.scala | 33 +- .../journal/inmem/InmemJournal.scala | 38 +- .../journal/japi/AsyncReplay.scala | 12 +- .../journal/japi/AsyncWriteJournal.scala | 14 +- .../journal/japi/SyncWriteJournal.scala | 14 +- .../journal/leveldb/LeveldbJournal.scala | 19 +- .../journal/leveldb/LeveldbKey.scala | 1 + .../journal/leveldb/LeveldbReplay.scala | 9 +- .../main/scala/akka/persistence/package.scala | 1 + .../serialization/MessageSerializer.scala | 73 +- .../serialization/SnapshotSerializer.scala | 1 + .../persistence/snapshot/SnapshotStore.scala | 17 +- .../snapshot/japi/SnapshotStore.scala | 4 +- .../snapshot/local/LocalSnapshotStore.scala | 1 + .../scala/akka/persistence/ChannelSpec.scala | 206 ++- .../serialization/SerializerSpec.scala | 53 +- .../japi/ProcessorChannelExample.java | 4 +- .../ConversationRecoveryExample.scala | 10 +- .../persistence/ProcessorChannelExample.scala | 2 +- 41 files changed, 2167 insertions(+), 722 deletions(-) diff --git a/akka-actor/src/main/scala/akka/actor/Stash.scala b/akka-actor/src/main/scala/akka/actor/Stash.scala index 60bec667a2..29e89353aa 100644 --- a/akka-actor/src/main/scala/akka/actor/Stash.scala +++ b/akka-actor/src/main/scala/akka/actor/Stash.scala @@ -3,9 +3,10 @@ */ package akka.actor -import akka.dispatch.{ UnboundedDequeBasedMessageQueueSemantics, RequiresMessageQueue, Envelope, DequeBasedMessageQueueSemantics } +import scala.collection.immutable + import akka.AkkaException -import akka.dispatch.Mailboxes +import akka.dispatch.{ UnboundedDequeBasedMessageQueueSemantics, RequiresMessageQueue, Envelope, DequeBasedMessageQueueSemantics, Mailboxes } /** * The `Stash` trait enables an actor to temporarily stash away messages that can not or @@ -60,7 +61,59 @@ trait UnboundedStash extends UnrestrictedStash with RequiresMessageQueue[Unbound * A version of [[akka.actor.Stash]] that does not enforce any mailbox type. 
The proper mailbox has to be configured * manually, and the mailbox should extend the [[akka.dispatch.DequeBasedMessageQueueSemantics]] marker trait. */ -trait UnrestrictedStash extends Actor { +trait UnrestrictedStash extends Actor with StashSupport { + /** + * Overridden callback. Prepends all messages in the stash to the mailbox, + * clears the stash, stops all children and invokes the postStop() callback. + */ + override def preRestart(reason: Throwable, message: Option[Any]): Unit = { + try unstashAll() finally super.preRestart(reason, message) + } + + /** + * Overridden callback. Prepends all messages in the stash to the mailbox and clears the stash. + * Must be called when overriding this method, otherwise stashed messages won't be propagated to DeadLetters + * when actor stops. + */ + override def postStop(): Unit = try unstashAll() finally super.postStop() +} + +/** + * INTERNAL API. + * + * A factory for creating stashes for an actor instance. + * + * @see [[StashSupport]] + */ +private[akka] trait StashFactory { this: Actor ⇒ + private[akka] def createStash()(implicit ctx: ActorContext, ref: ActorRef): StashSupport = new StashSupport { + def context: ActorContext = ctx + def self: ActorRef = ref + } +} + +/** + * INTERNAL API. + * + * Support trait for implementing a stash for an actor instance. A default stash per actor (= user stash) + * is maintained by [[UnrestrictedStash]] by extending this trait. Actors that explicitly need other stashes + * (optionally in addition to and isolated from the user stash) can create new stashes via [[StashFactory]]. + */ +private[akka] trait StashSupport { + /** + * INTERNAL API. + * + * Context of the actor that uses this stash. + */ + private[akka] def context: ActorContext + + /** + * INTERNAL API. + * + * Self reference of the actor that uses this stash. + */ + private[akka] def self: ActorRef + /* The private stash of the actor. It is only accessible using `stash()` and * `unstashAll()`. */ @@ -110,6 +163,29 @@ trait UnrestrictedStash extends Actor { else throw new StashOverflowException("Couldn't enqueue message " + currMsg + " to stash of " + self) } + /** + * Prepends `others` to this stash. + */ + private[akka] def prepend(others: immutable.Seq[Envelope]): Unit = + others.reverseIterator.foreach(env ⇒ theStash = env +: theStash) + + /** + * Prepends the oldest message in the stash to the mailbox, and then removes that + * message from the stash. + * + * Messages from the stash are enqueued to the mailbox until the capacity of the + * mailbox (if any) has been reached. In case a bounded mailbox overflows, a + * `MessageQueueAppendFailedException` is thrown. + * + * The unstashed message is guaranteed to be removed from the stash regardless + * if the `unstash()` call successfully returns or throws an exception. + */ + private[akka] def unstash(): Unit = if (theStash.nonEmpty) try { + mailbox.enqueueFirst(self, theStash.head) + } finally { + theStash = theStash.tail + } + /** * Prepends all messages in the stash to the mailbox, and then clears the stash. * @@ -155,22 +231,6 @@ trait UnrestrictedStash extends Actor { theStash = Vector.empty[Envelope] stashed } - - /** - * Overridden callback. Prepends all messages in the stash to the mailbox, - * clears the stash, stops all children and invokes the postStop() callback. - */ - override def preRestart(reason: Throwable, message: Option[Any]): Unit = { - try unstashAll() finally super.preRestart(reason, message) - } - - /** - * Overridden callback. 
Prepends all messages in the stash to the mailbox and clears the stash. - * Must be called when overriding this method, otherwise stashed messages won't be propagated to DeadLetters - * when actor stops. - */ - override def postStop(): Unit = try unstashAll() finally super.postStop() - } /** diff --git a/akka-docs/rst/java/code/docs/persistence/PersistenceDocTest.java b/akka-docs/rst/java/code/docs/persistence/PersistenceDocTest.java index 59b8e7e159..7f1a1f7c35 100644 --- a/akka-docs/rst/java/code/docs/persistence/PersistenceDocTest.java +++ b/akka-docs/rst/java/code/docs/persistence/PersistenceDocTest.java @@ -142,8 +142,8 @@ public class PersistenceDocTest { class MyDestination extends UntypedActor { public void onReceive(Object message) throws Exception { - if (message instanceof Persistent) { - Persistent p = (Persistent)message; + if (message instanceof ConfirmablePersistent) { + ConfirmablePersistent p = (ConfirmablePersistent)message; System.out.println("received " + p.payload()); p.confirm(); } @@ -266,4 +266,19 @@ public class PersistenceDocTest { } //#batch-write }; + + static Object o7 = new Object() { + abstract class MyProcessor extends UntypedProcessor { + ActorRef destination; + + public void foo() { + //#persistent-channel-example + final ActorRef channel = getContext().actorOf(PersistentChannel.props(), + "myPersistentChannel"); + + channel.tell(Deliver.create(Persistent.create("example"), destination), getSelf()); + //#persistent-channel-example + } + } + }; } diff --git a/akka-docs/rst/java/code/docs/persistence/PersistencePluginDocTest.java b/akka-docs/rst/java/code/docs/persistence/PersistencePluginDocTest.java index 78caaeb8fe..5d1c04582d 100644 --- a/akka-docs/rst/java/code/docs/persistence/PersistencePluginDocTest.java +++ b/akka-docs/rst/java/code/docs/persistence/PersistencePluginDocTest.java @@ -36,22 +36,22 @@ public class PersistencePluginDocTest { class MyAsyncJournal extends AsyncWriteJournal { @Override - public Future doReplayAsync(String processorId, long fromSequenceNr, long toSequenceNr, Procedure replayCallback) { + public Future doReplayAsync(String processorId, long fromSequenceNr, long toSequenceNr, Procedure replayCallback) { return null; } @Override - public Future doWriteAsync(PersistentImpl persistent) { + public Future doWriteAsync(PersistentRepr persistent) { return null; } @Override - public Future doWriteBatchAsync(Iterable persistentBatch) { + public Future doWriteBatchAsync(Iterable persistentBatch) { return null; } @Override - public Future doDeleteAsync(PersistentImpl persistent) { + public Future doDeleteAsync(String processorId, long sequenceNr, boolean physical) { return null; } diff --git a/akka-docs/rst/java/persistence.rst b/akka-docs/rst/java/persistence.rst index 01d1ca0ac6..76f1a19132 100644 --- a/akka-docs/rst/java/persistence.rst +++ b/akka-docs/rst/java/persistence.rst @@ -57,6 +57,9 @@ Architecture * *Snapshot store*: A snapshot store persists snapshots of a processor's internal state. Snapshots are used for optimizing recovery times. The storage backend of a snapshot store is pluggable. +* *Event sourcing*. Based on the building blocks described above, Akka persistence provides abstractions for the + development of event sourced applications (see section :ref:`event-sourcing-java`) + Configuration ============= @@ -115,7 +118,8 @@ If not overridden, ``preStart`` sends a ``Recover`` message to ``getSelf()``. Ap .. 
includecode:: code/docs/persistence/PersistenceDocTest.java#recover-on-start-custom -Automated recovery on restart can be disabled by overriding ``preRestart`` with an empty implementation. +Upper sequence number bounds can be used to recover a processor to past state instead of current state. Automated +recovery on restart can be disabled by overriding ``preRestart`` with an empty implementation. .. includecode:: code/docs/persistence/PersistenceDocTest.java#recover-on-restart-disabled @@ -132,10 +136,19 @@ Failure handling ^^^^^^^^^^^^^^^^ A persistent message that caused an exception will be received again by a processor after restart. To prevent -a replay of that message during recovery it can be marked as deleted. +a replay of that message during recovery it can be deleted. .. includecode:: code/docs/persistence/PersistenceDocTest.java#deletion +Message deletion +---------------- + +A processor can delete messages by calling the ``delete`` method with a ``Persistent`` message object or a +sequence number as argument. An optional ``physical`` parameter specifies whether the message shall be +physically deleted from the journal or only marked as deleted. In both cases, the message won't be replayed. +Later extensions to Akka persistence will allow to replay messages that have been marked as deleted which can +be useful for debugging purposes, for example. + Identifiers ----------- @@ -152,7 +165,7 @@ should override ``processorId``. .. includecode:: code/docs/persistence/PersistenceDocTest.java#processor-id-override -Later versions of the Akka persistence module will likely offer a possibility to migrate processor ids. +Later versions of Akka persistence will likely offer a possibility to migrate processor ids. Channels ======== @@ -166,21 +179,57 @@ message is retained by a channel if its previous delivery has been confirmed by A channel is ready to use once it has been created, no recovery or further activation is needed. A ``Deliver`` request instructs a channel to send a ``Persistent`` message to a destination where the sender of the ``Deliver`` request is forwarded to the destination. A processor may also reply to a message sender directly by using -``getSender()`` as channel destination. +``getSender()`` as channel destination (not shown). .. includecode:: code/docs/persistence/PersistenceDocTest.java#channel-example-reply -Channel destinations confirm the delivery of a ``Persistent`` message by calling its ``confirm()`` method. This -(asynchronously) writes a confirmation entry to the journal. Replayed messages internally contain these confirmation -entries which allows a channel to decide if a message should be retained or not. +Persistent messages delivered by a channel are of type ``ConfirmablePersistent``. It extends ``Persistent`` and +adds a ``confirm()`` method. Channel destinations confirm the delivery of a ``ConfirmablePersistent`` message by +calling ``confirm()``. This (asynchronously) writes a confirmation entry to the journal. Replayed messages +internally contain these confirmation entries which allows a channel to decide if a message should be retained or +not. ``ConfirmablePersistent`` messages can be used whereever ``Persistent`` messages are expected, which allows +processors to be used as channel destinations, for example. 
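+
+As a rough illustration, a processor used as a channel destination only needs to confirm the
+``ConfirmablePersistent`` messages it receives. The following sketch assumes the hypothetical class name
+``MyProcessorDestination``; everything else follows the API described above::
+
+    import akka.persistence.ConfirmablePersistent;
+    import akka.persistence.UntypedProcessor;
+
+    class MyProcessorDestination extends UntypedProcessor {
+        @Override
+        public void onReceive(Object message) throws Exception {
+            if (message instanceof ConfirmablePersistent) {
+                ConfirmablePersistent p = (ConfirmablePersistent) message;
+                // process p.payload() as needed, then acknowledge the delivery
+                // so that the channel will not retain and re-deliver the message
+                p.confirm();
+            }
+        }
+    }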
+ +Message re-delivery +------------------- If an application crashes after a destination called ``confirm()`` but before the confirmation entry could have -been written to the journal then the unconfirmed message will be delivered again during next recovery and it is -the destination's responsibility to detect the duplicate or simply process the message again if it's an idempotent -receiver. Duplicates can be detected, for example, by tracking sequence numbers. +been written to the journal then the unconfirmed message will be re-delivered during next recovery of the sending +processor. It is the destination's responsibility to detect the duplicate or simply process the message again if +it's an idempotent receiver. Duplicates can be detected, for example, by tracking sequence numbers. -Currently, channels do not store ``Deliver`` requests or retry delivery on network or destination failures. This -feature (*reliable channels*) will be available soon. +Although a channel prevents message loss in case of sender (JVM) crashes it doesn't attempt re-deliveries if a +destination is unavailable. To achieve reliable communication with a (remote) target, a channel destination may +want to use the :ref:`reliable-proxy` or add the message to a queue that is managed by a third party message +broker, for example. In latter case, the channel destination will first add the received message to the queue +and then call ``confirm()`` on the received ``ConfirmablePersistent`` message. + +Persistent channels +------------------- + +Channels created with ``Channel.props`` do not persist messages. This is not necessary because these (transient) +channels shall only be used in combination with a sending processor that takes care of message persistence. + +However, if an application wants to use a channel standalone (without a sending processor), to prevent message +loss in case of a sender (JVM) crash, it should use a persistent channel which can be created with ``PersistentChannel.props``. +A persistent channel additionally persists messages before they are delivered. Persistence is achieved by an +internal processor that delegates delivery to a transient channel. A persistent channel, when used standalone, +can therefore provide the same message re-delivery semantics as a transient channel in combination with an +application-defined processor. + + .. includecode:: code/docs/persistence/PersistenceDocTest.java#persistent-channel-example + +By default, a persistent channel doesn't reply whether a ``Persistent`` message, sent with ``Deliver``, has been +successfully persisted or not. This can be enabled by creating the channel with the ``persistentReply`` parameter +set to ``true``: ``PersistentChannel.props(true)``. With this setting, either the successfully persisted message +is replied to the sender or a ``PersistenceFailure``. In case of a persistence failure, the sender should re-send +the message. + +Using a persistent channel in combination with an application-defined processor can make sense if destinations are +unavailable for a long time and an application doesn't want to buffer all messages in memory (but write them to the +journal instead). In this case, delivery can be disabled with ``DisableDelivery`` (to stop delivery and persist-only) +and re-enabled with ``EnableDelivery``. A disabled channel that receives ``EnableDelivery`` will restart itself and +re-deliver all persisted, unconfirmed messages before serving new ``Deliver`` requests. 
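+
+The following sketch illustrates the ``persistentReply`` setting described above: it creates a replying
+persistent channel and reacts to the reply, re-sending on a ``PersistenceFailure``. The actor and channel
+names are made up for illustration, and the re-send bookkeeping is omitted::
+
+    import akka.actor.ActorRef;
+    import akka.actor.UntypedActor;
+    import akka.persistence.*;
+
+    class MySendingActor extends UntypedActor {
+        private final ActorRef destination; // assumed to be provided by the application
+        private final ActorRef channel =
+            getContext().actorOf(PersistentChannel.props(true), "myReplyingChannel");
+
+        public MySendingActor(ActorRef destination) {
+            this.destination = destination;
+        }
+
+        @Override
+        public void onReceive(Object message) throws Exception {
+            if (message instanceof PersistenceFailure) {
+                // the channel could not persist the message: re-send the corresponding
+                // Deliver request (tracking of outstanding requests not shown)
+            } else if (message instanceof Persistent) {
+                // the channel successfully persisted the message and will deliver it
+            } else {
+                // treat any other message as a payload to be delivered
+                channel.tell(Deliver.create(Persistent.create(message), destination), getSelf());
+            }
+        }
+    }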
Sender resolution ----------------- @@ -208,7 +257,8 @@ Identifiers In the same way as :ref:`processors-java`, channels also have an identifier that defaults to a channel's path. A channel identifier can therefore be customized by using a custom actor name at channel creation. As already mentioned, this works well when using local actor references but may cause problems with remote actor references. In this case, an -application-defined channel id should be provided as argument to ``Channel.props(String)`` +application-defined channel id should be provided as argument to ``Channel.props(String)`` or +``PersistentChannel.props(String)``. .. includecode:: code/docs/persistence/PersistenceDocTest.java#channel-id-override @@ -234,8 +284,8 @@ Sequence number --------------- The sequence number of a ``Persistent`` message can be obtained via its ``sequenceNr`` method. Persistent -messages are assigned sequence numbers on a per-processor basis. A sequence starts at ``1L`` and doesn't contain -gaps unless a processor marks a message as deleted. +messages are assigned sequence numbers on a per-processor basis (or per persistent channel basis if used +standalone). A sequence starts at ``1L`` and doesn't contain gaps unless a processor deletes a message. .. _snapshots-java: @@ -331,8 +381,9 @@ Applications may also send a batch of ``Persistent`` messages to a processor via received by the processor separately (as ``Persistent`` messages). They are also replayed separately. Batch writes can not only increase the throughput of a processor but may also be necessary for consistency reasons. For example, in :ref:`event-sourcing-java`, all events that are generated and persisted by a single command are batch-written to -the journal. The recovery of an ``UntypedEventsourcedProcessor`` will therefore never be done partially i.e. with -only a subset of events persisted by a single command. +the journal (even if ``persist`` is called multiple times per command). The recovery of an +``UntypedEventsourcedProcessor`` will therefore never be done partially i.e. with only a subset of events persisted +by a single command. Storage plugins =============== @@ -399,10 +450,3 @@ it must add to the application configuration. If not specified, a default serializer is used, which is the ``JavaSerializer`` in this example. - -Upcoming features -================= - -* Reliable channels -* Extended deletion of messages and snapshots -* ... diff --git a/akka-docs/rst/scala/code/docs/persistence/PersistenceDocSpec.scala b/akka-docs/rst/scala/code/docs/persistence/PersistenceDocSpec.scala index a732b1025c..beb23f9203 100644 --- a/akka-docs/rst/scala/code/docs/persistence/PersistenceDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/persistence/PersistenceDocSpec.scala @@ -117,7 +117,7 @@ trait PersistenceDocSpec { class MyDestination extends Actor { def receive = { - case p @ Persistent(payload, _) ⇒ { + case p @ ConfirmablePersistent(payload, _) ⇒ { println(s"received ${payload}") p.confirm() } @@ -243,4 +243,17 @@ trait PersistenceDocSpec { //#batch-write system.shutdown() } + + new AnyRef { + import akka.actor._ + trait MyActor extends Actor { + val destination: ActorRef = null + //#persistent-channel-example + val channel = context.actorOf(PersistentChannel.props(), + name = "myPersistentChannel") + + channel ! 
Deliver(Persistent("example"), destination) + //#persistent-channel-example + } + } } diff --git a/akka-docs/rst/scala/code/docs/persistence/PersistencePluginDocSpec.scala b/akka-docs/rst/scala/code/docs/persistence/PersistencePluginDocSpec.scala index 22e693b933..6f975f0e29 100644 --- a/akka-docs/rst/scala/code/docs/persistence/PersistencePluginDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/persistence/PersistencePluginDocSpec.scala @@ -69,11 +69,11 @@ class PersistencePluginDocSpec extends WordSpec { } class MyJournal extends AsyncWriteJournal { - def writeAsync(persistent: PersistentImpl): Future[Unit] = ??? - def writeBatchAsync(persistentBatch: Seq[PersistentImpl]): Future[Unit] = ??? - def deleteAsync(persistent: PersistentImpl): Future[Unit] = ??? + def writeAsync(persistent: PersistentRepr): Future[Unit] = ??? + def writeBatchAsync(persistentBatch: Seq[PersistentRepr]): Future[Unit] = ??? + def deleteAsync(processorId: String, sequenceNr: Long, physical: Boolean): Future[Unit] = ??? def confirmAsync(processorId: String, sequenceNr: Long, channelId: String): Future[Unit] = ??? - def replayAsync(processorId: String, fromSequenceNr: Long, toSequenceNr: Long)(replayCallback: (PersistentImpl) ⇒ Unit): Future[Long] = ??? + def replayAsync(processorId: String, fromSequenceNr: Long, toSequenceNr: Long)(replayCallback: (PersistentRepr) ⇒ Unit): Future[Long] = ??? } class MySnapshotStore extends SnapshotStore { diff --git a/akka-docs/rst/scala/persistence.rst b/akka-docs/rst/scala/persistence.rst index 9f61d2b4a5..d36e71c7c0 100644 --- a/akka-docs/rst/scala/persistence.rst +++ b/akka-docs/rst/scala/persistence.rst @@ -53,6 +53,9 @@ Architecture * *Snapshot store*: A snapshot store persists snapshots of a processor's internal state. Snapshots are used for optimizing recovery times. The storage backend of a snapshot store is pluggable. +* *Event sourcing*. Based on the building blocks described above, Akka persistence provides abstractions for the + development of event sourced applications (see section :ref:`event-sourcing`) + Configuration ============= @@ -110,7 +113,8 @@ If not overridden, ``preStart`` sends a ``Recover()`` message to ``self``. Appli .. includecode:: code/docs/persistence/PersistenceDocSpec.scala#recover-on-start-custom -Automated recovery on restart can be disabled by overriding ``preRestart`` with an empty implementation. +Upper sequence number bounds can be used to recover a processor to past state instead of current state. Automated +recovery on restart can be disabled by overriding ``preRestart`` with an empty implementation. .. includecode:: code/docs/persistence/PersistenceDocSpec.scala#recover-on-restart-disabled @@ -127,10 +131,19 @@ Failure handling ^^^^^^^^^^^^^^^^ A persistent message that caused an exception will be received again by a processor after restart. To prevent -a replay of that message during recovery it can be marked as deleted. +a replay of that message during recovery it can be deleted. .. includecode:: code/docs/persistence/PersistenceDocSpec.scala#deletion +Message deletion +---------------- + +A processor can delete messages by calling the ``delete`` method with a ``Persistent`` message object or a +sequence number as argument. An optional ``physical`` parameter specifies whether the message shall be +physically deleted from the journal or only marked as deleted. In both cases, the message won't be replayed. 
+Later extensions to Akka persistence will allow to replay messages that have been marked as deleted which can +be useful for debugging purposes, for example. + Identifiers ----------- @@ -147,7 +160,7 @@ should override ``processorId``. .. includecode:: code/docs/persistence/PersistenceDocSpec.scala#processor-id-override -Later versions of the Akka persistence module will likely offer a possibility to migrate processor ids. +Later versions of Akka persistence will likely offer a possibility to migrate processor ids. Channels ======== @@ -161,21 +174,57 @@ message is retained by a channel if its previous delivery has been confirmed by A channel is ready to use once it has been created, no recovery or further activation is needed. A ``Deliver`` request instructs a channel to send a ``Persistent`` message to a destination where the sender of the ``Deliver`` request is forwarded to the destination. A processor may also reply to a message sender directly by using ``sender`` -as channel destination. +as channel destination (not shown). .. includecode:: code/docs/persistence/PersistenceDocSpec.scala#channel-example-reply -Channel destinations confirm the delivery of a ``Persistent`` message by calling its ``confirm()`` method. This -(asynchronously) writes a confirmation entry to the journal. Replayed messages internally contain these confirmation -entries which allows a channel to decide if a message should be retained or not. +Persistent messages delivered by a channel are of type ``ConfirmablePersistent``. It extends ``Persistent`` and +adds a ``confirm()`` method. Channel destinations confirm the delivery of a ``ConfirmablePersistent`` message by +calling ``confirm()``. This (asynchronously) writes a confirmation entry to the journal. Replayed messages +internally contain these confirmation entries which allows a channel to decide if a message should be retained or +not. ``ConfirmablePersistent`` messages can be used whereever ``Persistent`` messages are expected, which allows +processors to be used as channel destinations, for example. + +Message re-delivery +------------------- If an application crashes after a destination called ``confirm()`` but before the confirmation entry could have -been written to the journal then the unconfirmed message will be delivered again during next recovery and it is -the destination's responsibility to detect the duplicate or simply process the message again if it's an idempotent -receiver. Duplicates can be detected, for example, by tracking sequence numbers. +been written to the journal then the unconfirmed message will be re-delivered during next recovery of the sending +processor. It is the destination's responsibility to detect the duplicate or simply process the message again if +it's an idempotent receiver. Duplicates can be detected, for example, by tracking sequence numbers. -Currently, channels do not store ``Deliver`` requests or retry delivery on network or destination failures. This -feature (*reliable channels*) will be available soon. +Although a channel prevents message loss in case of sender (JVM) crashes it doesn't attempt re-deliveries if a +destination is unavailable. To achieve reliable communication with a (remote) target, a channel destination may +want to use the :ref:`reliable-proxy` or add the message to a queue that is managed by a third party message +broker, for example. 
In latter case, the channel destination will first add the received message to the queue +and then call ``confirm()`` on the received ``ConfirmablePersistent`` message. + +Persistent channels +------------------- + +Channels created with ``Channel.props`` do not persist messages. This is not necessary because these (transient) +channels shall only be used in combination with a sending processor that takes care of message persistence. + +However, if an application wants to use a channel standalone (without a sending processor), to prevent message +loss in case of a sender (JVM) crash, it should use a persistent channel which can be created with ``PersistentChannel.props``. +A persistent channel additionally persists messages before they are delivered. Persistence is achieved by an +internal processor that delegates delivery to a transient channel. A persistent channel, when used standalone, +can therefore provide the same message re-delivery semantics as a transient channel in combination with an +application-defined processor. + + .. includecode:: code/docs/persistence/PersistenceDocSpec.scala#persistent-channel-example + +By default, a persistent channel doesn't reply whether a ``Persistent`` message, sent with ``Deliver``, has been +successfully persisted or not. This can be enabled by creating the channel with +``PersistentChannel.props(persistentReply = true)``. With this setting, either the successfully persisted message +is replied to the sender or a ``PersistenceFailure``. In case of a persistence failure, the sender should re-send +the message. + +Using a persistent channel in combination with an application-defined processor can make sense if destinations are +unavailable for a long time and an application doesn't want to buffer all messages in memory (but write them to the +journal instead). In this case, delivery can be disabled with ``DisableDelivery`` (to stop delivery and persist-only) +and re-enabled with ``EnableDelivery``. A disabled channel that receives ``EnableDelivery`` will restart itself and +re-deliver all persisted, unconfirmed messages before serving new ``Deliver`` requests. Sender resolution ----------------- @@ -203,7 +252,8 @@ Identifiers In the same way as :ref:`processors`, channels also have an identifier that defaults to a channel's path. A channel identifier can therefore be customized by using a custom actor name at channel creation. As already mentioned, this works well when using local actor references but may cause problems with remote actor references. In this case, an -application-defined channel id should be provided as argument to ``Channel.props(String)`` +application-defined channel id should be provided as argument to ``Channel.props(String)`` or +``PersistentChannel.props(String)``. .. includecode:: code/docs/persistence/PersistenceDocSpec.scala#channel-id-override @@ -241,8 +291,8 @@ method or by pattern matching .. includecode:: code/docs/persistence/PersistenceDocSpec.scala#sequence-nr-pattern-matching -Persistent messages are assigned sequence numbers on a per-processor basis. A sequence starts at ``1L`` and -doesn't contain gaps unless a processor marks a message as deleted. +Persistent messages are assigned sequence numbers on a per-processor basis (or per persistent channel basis if used +standalone). A sequence starts at ``1L`` and doesn't contain gaps unless a processor deletes a message. .. 
_snapshots: @@ -342,8 +392,8 @@ Applications may also send a batch of ``Persistent`` messages to a processor via received by the processor separately (as ``Persistent`` messages). They are also replayed separately. Batch writes can not only increase the throughput of a processor but may also be necessary for consistency reasons. For example, in :ref:`event-sourcing`, all events that are generated and persisted by a single command are batch-written to the -journal. The recovery of an ``EventsourcedProcessor`` will therefore never be done partially i.e. with only a subset -of events persisted by a single command. +journal (even if ``persist`` is called multiple times per command). The recovery of an ``EventsourcedProcessor`` +will therefore never be done partially i.e. with only a subset of events persisted by a single command. Storage plugins =============== @@ -420,10 +470,3 @@ State machines State machines can be persisted by mixing in the ``FSM`` trait into processors. .. includecode:: code/docs/persistence/PersistenceDocSpec.scala#fsm-example - -Upcoming features -================= - -* Reliable channels -* Extended deletion of messages and snapshots -* ... diff --git a/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncReplayPlugin.java b/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncReplayPlugin.java index 5c0e8ed0c1..e8927e75fe 100644 --- a/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncReplayPlugin.java +++ b/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncReplayPlugin.java @@ -7,22 +7,21 @@ package akka.persistence.journal.japi; import scala.concurrent.Future; import akka.japi.Procedure; -import akka.persistence.PersistentImpl; +import akka.persistence.PersistentRepr; interface AsyncReplayPlugin { //#async-replay-plugin-api /** - * Plugin Java API. - * - * Asynchronously replays persistent messages. Implementations replay a message - * by calling `replayCallback`. The returned future must be completed when all - * messages (matching the sequence number bounds) have been replayed. The future - * `Long` value must be the highest stored sequence number in the journal for the - * specified processor. The future must be completed with a failure if any of - * the persistent messages could not be replayed. + * Java API, Plugin API: asynchronously replays persistent messages. + * Implementations replay a message by calling `replayCallback`. The returned + * future must be completed when all messages (matching the sequence number + * bounds) have been replayed. The future `Long` value must be the highest + * stored sequence number in the journal for the specified processor. The + * future must be completed with a failure if any of the persistent messages + * could not be replayed. * * The `replayCallback` must also be called with messages that have been marked - * as deleted. In this case a replayed message's `deleted` field must be set to + * as deleted. In this case a replayed message's `deleted` method must return * `true`. * * The channel ids of delivery confirmations that are available for a replayed @@ -34,6 +33,6 @@ interface AsyncReplayPlugin { * @param replayCallback called to replay a single message. Can be called from any * thread. 
*/ - Future doReplayAsync(String processorId, long fromSequenceNr, long toSequenceNr, Procedure replayCallback); + Future doReplayAsync(String processorId, long fromSequenceNr, long toSequenceNr, Procedure replayCallback); //#async-replay-plugin-api } diff --git a/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncWritePlugin.java b/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncWritePlugin.java index 41f6cb24cf..cc6424b38f 100644 --- a/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncWritePlugin.java +++ b/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncWritePlugin.java @@ -6,36 +6,32 @@ package akka.persistence.journal.japi; import scala.concurrent.Future; -import akka.persistence.PersistentImpl; +import akka.persistence.PersistentRepr; interface AsyncWritePlugin { //#async-write-plugin-api /** - * Plugin Java API. - * - * Asynchronously writes a `persistent` message to the journal. + * Java API, Plugin API: asynchronously writes a `persistent` message to the journal. */ - Future doWriteAsync(PersistentImpl persistent); + Future doWriteAsync(PersistentRepr persistent); /** - * Plugin Java API. - * - * Asynchronously writes a batch of persistent messages to the journal. The batch write - * must be atomic i.e. either all persistent messages in the batch are written or none. + * Java API, Plugin API: asynchronously writes a batch of persistent messages to the + * journal. The batch write must be atomic i.e. either all persistent messages in the + * batch are written or none. */ - Future doWriteBatchAsync(Iterable persistentBatch); + Future doWriteBatchAsync(Iterable persistentBatch); /** - * Plugin Java API. - * - * Asynchronously marks a `persistent` message as deleted. + * Java API, Plugin API: asynchronously deletes a persistent message. If `physical` + * is set to `false`, the persistent message is marked as deleted, otherwise it is + * physically deleted. */ - Future doDeleteAsync(PersistentImpl persistent); + Future doDeleteAsync(String processorId, long sequenceNr, boolean physical); /** - * Plugin Java API. - * - * Asynchronously writes a delivery confirmation to the journal. + * Java API, Plugin API: asynchronously writes a delivery confirmation to the + * journal. */ Future doConfirmAsync(String processorId, long sequenceNr, String channelId); //#async-write-plugin-api diff --git a/akka-persistence/src/main/java/akka/persistence/journal/japi/SyncWritePlugin.java b/akka-persistence/src/main/java/akka/persistence/journal/japi/SyncWritePlugin.java index 8660b5dab7..4ee4e85169 100644 --- a/akka-persistence/src/main/java/akka/persistence/journal/japi/SyncWritePlugin.java +++ b/akka-persistence/src/main/java/akka/persistence/journal/japi/SyncWritePlugin.java @@ -4,36 +4,31 @@ package akka.persistence.journal.japi; -import akka.persistence.PersistentImpl; +import akka.persistence.PersistentRepr; interface SyncWritePlugin { //#sync-write-plugin-api /** - * Plugin Java API. - * - * Synchronously writes a `persistent` message to the journal. + * Java API, Plugin API: synchronously writes a `persistent` message to the journal. */ - void doWrite(PersistentImpl persistent) throws Exception; + void doWrite(PersistentRepr persistent) throws Exception; /** - * Plugin Java API. - * - * Synchronously writes a batch of persistent messages to the journal. The batch write - * must be atomic i.e. either all persistent messages in the batch are written or none. 
+ * Java API, Plugin API: synchronously writes a batch of persistent messages to the + * journal. The batch write must be atomic i.e. either all persistent messages in the + * batch are written or none. */ - void doWriteBatch(Iterable persistentBatch); + void doWriteBatch(Iterable persistentBatch); /** - * Plugin Java API. - * - * Synchronously marks a `persistent` message as deleted. + * Java API, Plugin API: synchronously deletes a persistent message. If `physical` + * is set to `false`, the persistent message is marked as deleted, otherwise it is + * physically deleted. */ - void doDelete(PersistentImpl persistent) throws Exception; + void doDelete(String processorId, long sequenceNr, boolean physical); /** - * Plugin Java API. - * - * Synchronously writes a delivery confirmation to the journal. + * Java API, Plugin API: synchronously writes a delivery confirmation to the journal. */ void doConfirm(String processorId, long sequenceNr, String channelId) throws Exception; //#sync-write-plugin-api diff --git a/akka-persistence/src/main/java/akka/persistence/serialization/MessageFormats.java b/akka-persistence/src/main/java/akka/persistence/serialization/MessageFormats.java index cd7bdab38e..aaf8e81112 100644 --- a/akka-persistence/src/main/java/akka/persistence/serialization/MessageFormats.java +++ b/akka-persistence/src/main/java/akka/persistence/serialization/MessageFormats.java @@ -736,21 +736,6 @@ public final class MessageFormats { com.google.protobuf.ByteString getProcessorIdBytes(); - // optional string channelId = 4; - /** - * optional string channelId = 4; - */ - boolean hasChannelId(); - /** - * optional string channelId = 4; - */ - java.lang.String getChannelId(); - /** - * optional string channelId = 4; - */ - com.google.protobuf.ByteString - getChannelIdBytes(); - // optional bool deleted = 5; /** * optional bool deleted = 5; @@ -791,6 +776,16 @@ public final class MessageFormats { com.google.protobuf.ByteString getConfirmsBytes(int index); + // optional bool confirmable = 11; + /** + * optional bool confirmable = 11; + */ + boolean hasConfirmable(); + /** + * optional bool confirmable = 11; + */ + boolean getConfirmable(); + // optional .ConfirmMessage confirmMessage = 10; /** * optional .ConfirmMessage confirmMessage = 10; @@ -909,18 +904,13 @@ public final class MessageFormats { processorId_ = input.readBytes(); break; } - case 34: { - bitField0_ |= 0x00000008; - channelId_ = input.readBytes(); - break; - } case 40: { - bitField0_ |= 0x00000010; + bitField0_ |= 0x00000008; deleted_ = input.readBool(); break; } case 48: { - bitField0_ |= 0x00000020; + bitField0_ |= 0x00000010; resolved_ = input.readBool(); break; } @@ -930,9 +920,9 @@ public final class MessageFormats { break; } case 66: { - if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) { + if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) { confirms_ = new com.google.protobuf.LazyStringArrayList(); - mutable_bitField0_ |= 0x00000040; + mutable_bitField0_ |= 0x00000020; } confirms_.add(input.readBytes()); break; @@ -955,6 +945,11 @@ public final class MessageFormats { bitField0_ |= 0x00000040; break; } + case 88: { + bitField0_ |= 0x00000020; + confirmable_ = input.readBool(); + break; + } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { @@ -963,7 +958,7 @@ public final class MessageFormats { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { - if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) { + if 
(((mutable_bitField0_ & 0x00000020) == 0x00000020)) { confirms_ = new com.google.protobuf.UnmodifiableLazyStringList(confirms_); } this.unknownFields = unknownFields.build(); @@ -1079,49 +1074,6 @@ public final class MessageFormats { } } - // optional string channelId = 4; - public static final int CHANNELID_FIELD_NUMBER = 4; - private java.lang.Object channelId_; - /** - * optional string channelId = 4; - */ - public boolean hasChannelId() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - /** - * optional string channelId = 4; - */ - public java.lang.String getChannelId() { - java.lang.Object ref = channelId_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - channelId_ = s; - } - return s; - } - } - /** - * optional string channelId = 4; - */ - public com.google.protobuf.ByteString - getChannelIdBytes() { - java.lang.Object ref = channelId_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - channelId_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - // optional bool deleted = 5; public static final int DELETED_FIELD_NUMBER = 5; private boolean deleted_; @@ -1129,7 +1081,7 @@ public final class MessageFormats { * optional bool deleted = 5; */ public boolean hasDeleted() { - return ((bitField0_ & 0x00000010) == 0x00000010); + return ((bitField0_ & 0x00000008) == 0x00000008); } /** * optional bool deleted = 5; @@ -1145,7 +1097,7 @@ public final class MessageFormats { * optional bool resolved = 6; */ public boolean hasResolved() { - return ((bitField0_ & 0x00000020) == 0x00000020); + return ((bitField0_ & 0x00000010) == 0x00000010); } /** * optional bool resolved = 6; @@ -1184,6 +1136,22 @@ public final class MessageFormats { return confirms_.getByteString(index); } + // optional bool confirmable = 11; + public static final int CONFIRMABLE_FIELD_NUMBER = 11; + private boolean confirmable_; + /** + * optional bool confirmable = 11; + */ + public boolean hasConfirmable() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + /** + * optional bool confirmable = 11; + */ + public boolean getConfirmable() { + return confirmable_; + } + // optional .ConfirmMessage confirmMessage = 10; public static final int CONFIRMMESSAGE_FIELD_NUMBER = 10; private akka.persistence.serialization.MessageFormats.ConfirmMessage confirmMessage_; @@ -1296,10 +1264,10 @@ public final class MessageFormats { payload_ = akka.persistence.serialization.MessageFormats.PersistentPayload.getDefaultInstance(); sequenceNr_ = 0L; processorId_ = ""; - channelId_ = ""; deleted_ = false; resolved_ = false; confirms_ = com.google.protobuf.LazyStringArrayList.EMPTY; + confirmable_ = false; confirmMessage_ = akka.persistence.serialization.MessageFormats.ConfirmMessage.getDefaultInstance(); confirmTarget_ = ""; sender_ = ""; @@ -1332,12 +1300,9 @@ public final class MessageFormats { output.writeBytes(3, getProcessorIdBytes()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBytes(4, getChannelIdBytes()); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBool(5, deleted_); } - if (((bitField0_ & 0x00000020) == 0x00000020)) { + if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBool(6, resolved_); } if (((bitField0_ & 0x00000100) == 0x00000100)) { @@ -1352,6 
+1317,9 @@ public final class MessageFormats { if (((bitField0_ & 0x00000040) == 0x00000040)) { output.writeMessage(10, confirmMessage_); } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeBool(11, confirmable_); + } getUnknownFields().writeTo(output); } @@ -1374,14 +1342,10 @@ public final class MessageFormats { .computeBytesSize(3, getProcessorIdBytes()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(4, getChannelIdBytes()); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(5, deleted_); } - if (((bitField0_ & 0x00000020) == 0x00000020)) { + if (((bitField0_ & 0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(6, resolved_); } @@ -1406,6 +1370,10 @@ public final class MessageFormats { size += com.google.protobuf.CodedOutputStream .computeMessageSize(10, confirmMessage_); } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(11, confirmable_); + } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -1534,13 +1502,13 @@ public final class MessageFormats { bitField0_ = (bitField0_ & ~0x00000002); processorId_ = ""; bitField0_ = (bitField0_ & ~0x00000004); - channelId_ = ""; - bitField0_ = (bitField0_ & ~0x00000008); deleted_ = false; - bitField0_ = (bitField0_ & ~0x00000010); + bitField0_ = (bitField0_ & ~0x00000008); resolved_ = false; - bitField0_ = (bitField0_ & ~0x00000020); + bitField0_ = (bitField0_ & ~0x00000010); confirms_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000020); + confirmable_ = false; bitField0_ = (bitField0_ & ~0x00000040); if (confirmMessageBuilder_ == null) { confirmMessage_ = akka.persistence.serialization.MessageFormats.ConfirmMessage.getDefaultInstance(); @@ -1599,21 +1567,21 @@ public final class MessageFormats { if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } - result.channelId_ = channelId_; + result.deleted_ = deleted_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } - result.deleted_ = deleted_; - if (((from_bitField0_ & 0x00000020) == 0x00000020)) { - to_bitField0_ |= 0x00000020; - } result.resolved_ = resolved_; - if (((bitField0_ & 0x00000040) == 0x00000040)) { + if (((bitField0_ & 0x00000020) == 0x00000020)) { confirms_ = new com.google.protobuf.UnmodifiableLazyStringList( confirms_); - bitField0_ = (bitField0_ & ~0x00000040); + bitField0_ = (bitField0_ & ~0x00000020); } result.confirms_ = confirms_; + if (((from_bitField0_ & 0x00000040) == 0x00000040)) { + to_bitField0_ |= 0x00000020; + } + result.confirmable_ = confirmable_; if (((from_bitField0_ & 0x00000080) == 0x00000080)) { to_bitField0_ |= 0x00000040; } @@ -1657,11 +1625,6 @@ public final class MessageFormats { processorId_ = other.processorId_; onChanged(); } - if (other.hasChannelId()) { - bitField0_ |= 0x00000008; - channelId_ = other.channelId_; - onChanged(); - } if (other.hasDeleted()) { setDeleted(other.getDeleted()); } @@ -1671,13 +1634,16 @@ public final class MessageFormats { if (!other.confirms_.isEmpty()) { if (confirms_.isEmpty()) { confirms_ = other.confirms_; - bitField0_ = (bitField0_ & ~0x00000040); + bitField0_ = (bitField0_ & ~0x00000020); } else { ensureConfirmsIsMutable(); confirms_.addAll(other.confirms_); } onChanged(); } + if (other.hasConfirmable()) { + 
setConfirmable(other.getConfirmable()); + } if (other.hasConfirmMessage()) { mergeConfirmMessage(other.getConfirmMessage()); } @@ -1948,87 +1914,13 @@ public final class MessageFormats { return this; } - // optional string channelId = 4; - private java.lang.Object channelId_ = ""; - /** - * optional string channelId = 4; - */ - public boolean hasChannelId() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - /** - * optional string channelId = 4; - */ - public java.lang.String getChannelId() { - java.lang.Object ref = channelId_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - channelId_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string channelId = 4; - */ - public com.google.protobuf.ByteString - getChannelIdBytes() { - java.lang.Object ref = channelId_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - channelId_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string channelId = 4; - */ - public Builder setChannelId( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000008; - channelId_ = value; - onChanged(); - return this; - } - /** - * optional string channelId = 4; - */ - public Builder clearChannelId() { - bitField0_ = (bitField0_ & ~0x00000008); - channelId_ = getDefaultInstance().getChannelId(); - onChanged(); - return this; - } - /** - * optional string channelId = 4; - */ - public Builder setChannelIdBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000008; - channelId_ = value; - onChanged(); - return this; - } - // optional bool deleted = 5; private boolean deleted_ ; /** * optional bool deleted = 5; */ public boolean hasDeleted() { - return ((bitField0_ & 0x00000010) == 0x00000010); + return ((bitField0_ & 0x00000008) == 0x00000008); } /** * optional bool deleted = 5; @@ -2040,7 +1932,7 @@ public final class MessageFormats { * optional bool deleted = 5; */ public Builder setDeleted(boolean value) { - bitField0_ |= 0x00000010; + bitField0_ |= 0x00000008; deleted_ = value; onChanged(); return this; @@ -2049,7 +1941,7 @@ public final class MessageFormats { * optional bool deleted = 5; */ public Builder clearDeleted() { - bitField0_ = (bitField0_ & ~0x00000010); + bitField0_ = (bitField0_ & ~0x00000008); deleted_ = false; onChanged(); return this; @@ -2061,7 +1953,7 @@ public final class MessageFormats { * optional bool resolved = 6; */ public boolean hasResolved() { - return ((bitField0_ & 0x00000020) == 0x00000020); + return ((bitField0_ & 0x00000010) == 0x00000010); } /** * optional bool resolved = 6; @@ -2073,7 +1965,7 @@ public final class MessageFormats { * optional bool resolved = 6; */ public Builder setResolved(boolean value) { - bitField0_ |= 0x00000020; + bitField0_ |= 0x00000010; resolved_ = value; onChanged(); return this; @@ -2082,7 +1974,7 @@ public final class MessageFormats { * optional bool resolved = 6; */ public Builder clearResolved() { - bitField0_ = (bitField0_ & ~0x00000020); + bitField0_ = (bitField0_ & ~0x00000010); resolved_ = false; onChanged(); return this; @@ -2091,9 +1983,9 @@ public final class MessageFormats { // repeated string confirms = 8; private com.google.protobuf.LazyStringList confirms_ = 
com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureConfirmsIsMutable() { - if (!((bitField0_ & 0x00000040) == 0x00000040)) { + if (!((bitField0_ & 0x00000020) == 0x00000020)) { confirms_ = new com.google.protobuf.LazyStringArrayList(confirms_); - bitField0_ |= 0x00000040; + bitField0_ |= 0x00000020; } } /** @@ -2163,7 +2055,7 @@ public final class MessageFormats { */ public Builder clearConfirms() { confirms_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000040); + bitField0_ = (bitField0_ & ~0x00000020); onChanged(); return this; } @@ -2181,6 +2073,39 @@ public final class MessageFormats { return this; } + // optional bool confirmable = 11; + private boolean confirmable_ ; + /** + * optional bool confirmable = 11; + */ + public boolean hasConfirmable() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + /** + * optional bool confirmable = 11; + */ + public boolean getConfirmable() { + return confirmable_; + } + /** + * optional bool confirmable = 11; + */ + public Builder setConfirmable(boolean value) { + bitField0_ |= 0x00000040; + confirmable_ = value; + onChanged(); + return this; + } + /** + * optional bool confirmable = 11; + */ + public Builder clearConfirmable() { + bitField0_ = (bitField0_ & ~0x00000040); + confirmable_ = false; + onChanged(); + return this; + } + // optional .ConfirmMessage confirmMessage = 10; private akka.persistence.serialization.MessageFormats.ConfirmMessage confirmMessage_ = akka.persistence.serialization.MessageFormats.ConfirmMessage.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< @@ -3745,6 +3670,862 @@ public final class MessageFormats { // @@protoc_insertion_point(class_scope:ConfirmMessage) } + public interface DeliverMessageOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional .PersistentMessage persistent = 1; + /** + * optional .PersistentMessage persistent = 1; + */ + boolean hasPersistent(); + /** + * optional .PersistentMessage persistent = 1; + */ + akka.persistence.serialization.MessageFormats.PersistentMessage getPersistent(); + /** + * optional .PersistentMessage persistent = 1; + */ + akka.persistence.serialization.MessageFormats.PersistentMessageOrBuilder getPersistentOrBuilder(); + + // optional string destination = 2; + /** + * optional string destination = 2; + */ + boolean hasDestination(); + /** + * optional string destination = 2; + */ + java.lang.String getDestination(); + /** + * optional string destination = 2; + */ + com.google.protobuf.ByteString + getDestinationBytes(); + + // optional .DeliverMessage.ResolveStrategy resolve = 3; + /** + * optional .DeliverMessage.ResolveStrategy resolve = 3; + */ + boolean hasResolve(); + /** + * optional .DeliverMessage.ResolveStrategy resolve = 3; + */ + akka.persistence.serialization.MessageFormats.DeliverMessage.ResolveStrategy getResolve(); + } + /** + * Protobuf type {@code DeliverMessage} + */ + public static final class DeliverMessage extends + com.google.protobuf.GeneratedMessage + implements DeliverMessageOrBuilder { + // Use DeliverMessage.newBuilder() to construct. 
+ private DeliverMessage(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private DeliverMessage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final DeliverMessage defaultInstance; + public static DeliverMessage getDefaultInstance() { + return defaultInstance; + } + + public DeliverMessage getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private DeliverMessage( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + akka.persistence.serialization.MessageFormats.PersistentMessage.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = persistent_.toBuilder(); + } + persistent_ = input.readMessage(akka.persistence.serialization.MessageFormats.PersistentMessage.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(persistent_); + persistent_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + bitField0_ |= 0x00000002; + destination_ = input.readBytes(); + break; + } + case 24: { + int rawValue = input.readEnum(); + akka.persistence.serialization.MessageFormats.DeliverMessage.ResolveStrategy value = akka.persistence.serialization.MessageFormats.DeliverMessage.ResolveStrategy.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(3, rawValue); + } else { + bitField0_ |= 0x00000004; + resolve_ = value; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return akka.persistence.serialization.MessageFormats.internal_static_DeliverMessage_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return akka.persistence.serialization.MessageFormats.internal_static_DeliverMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + akka.persistence.serialization.MessageFormats.DeliverMessage.class, akka.persistence.serialization.MessageFormats.DeliverMessage.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public DeliverMessage parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DeliverMessage(input, 
extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + /** + * Protobuf enum {@code DeliverMessage.ResolveStrategy} + */ + public enum ResolveStrategy + implements com.google.protobuf.ProtocolMessageEnum { + /** + * Off = 1; + */ + Off(0, 1), + /** + * Sender = 2; + */ + Sender(1, 2), + /** + * Destination = 3; + */ + Destination(2, 3), + ; + + /** + * Off = 1; + */ + public static final int Off_VALUE = 1; + /** + * Sender = 2; + */ + public static final int Sender_VALUE = 2; + /** + * Destination = 3; + */ + public static final int Destination_VALUE = 3; + + + public final int getNumber() { return value; } + + public static ResolveStrategy valueOf(int value) { + switch (value) { + case 1: return Off; + case 2: return Sender; + case 3: return Destination; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public ResolveStrategy findValueByNumber(int number) { + return ResolveStrategy.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return akka.persistence.serialization.MessageFormats.DeliverMessage.getDescriptor().getEnumTypes().get(0); + } + + private static final ResolveStrategy[] VALUES = values(); + + public static ResolveStrategy valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private ResolveStrategy(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:DeliverMessage.ResolveStrategy) + } + + private int bitField0_; + // optional .PersistentMessage persistent = 1; + public static final int PERSISTENT_FIELD_NUMBER = 1; + private akka.persistence.serialization.MessageFormats.PersistentMessage persistent_; + /** + * optional .PersistentMessage persistent = 1; + */ + public boolean hasPersistent() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional .PersistentMessage persistent = 1; + */ + public akka.persistence.serialization.MessageFormats.PersistentMessage getPersistent() { + return persistent_; + } + /** + * optional .PersistentMessage persistent = 1; + */ + public akka.persistence.serialization.MessageFormats.PersistentMessageOrBuilder getPersistentOrBuilder() { + return persistent_; + } + + // optional string destination = 2; + public static final int DESTINATION_FIELD_NUMBER = 2; + private java.lang.Object destination_; + /** + * optional string destination = 2; + */ + public boolean hasDestination() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional string destination = 2; + */ + public java.lang.String getDestination() { + java.lang.Object ref = destination_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = 
+ (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + destination_ = s; + } + return s; + } + } + /** + * optional string destination = 2; + */ + public com.google.protobuf.ByteString + getDestinationBytes() { + java.lang.Object ref = destination_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + destination_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional .DeliverMessage.ResolveStrategy resolve = 3; + public static final int RESOLVE_FIELD_NUMBER = 3; + private akka.persistence.serialization.MessageFormats.DeliverMessage.ResolveStrategy resolve_; + /** + * optional .DeliverMessage.ResolveStrategy resolve = 3; + */ + public boolean hasResolve() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional .DeliverMessage.ResolveStrategy resolve = 3; + */ + public akka.persistence.serialization.MessageFormats.DeliverMessage.ResolveStrategy getResolve() { + return resolve_; + } + + private void initFields() { + persistent_ = akka.persistence.serialization.MessageFormats.PersistentMessage.getDefaultInstance(); + destination_ = ""; + resolve_ = akka.persistence.serialization.MessageFormats.DeliverMessage.ResolveStrategy.Off; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (hasPersistent()) { + if (!getPersistent().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, persistent_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getDestinationBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeEnum(3, resolve_.getNumber()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, persistent_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getDestinationBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(3, resolve_.getNumber()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + public static akka.persistence.serialization.MessageFormats.DeliverMessage parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static akka.persistence.serialization.MessageFormats.DeliverMessage parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static akka.persistence.serialization.MessageFormats.DeliverMessage parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static akka.persistence.serialization.MessageFormats.DeliverMessage parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static akka.persistence.serialization.MessageFormats.DeliverMessage parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static akka.persistence.serialization.MessageFormats.DeliverMessage parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static akka.persistence.serialization.MessageFormats.DeliverMessage parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static akka.persistence.serialization.MessageFormats.DeliverMessage parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static akka.persistence.serialization.MessageFormats.DeliverMessage parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static akka.persistence.serialization.MessageFormats.DeliverMessage parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(akka.persistence.serialization.MessageFormats.DeliverMessage prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code DeliverMessage} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements akka.persistence.serialization.MessageFormats.DeliverMessageOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return akka.persistence.serialization.MessageFormats.internal_static_DeliverMessage_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return akka.persistence.serialization.MessageFormats.internal_static_DeliverMessage_fieldAccessorTable + .ensureFieldAccessorsInitialized( + akka.persistence.serialization.MessageFormats.DeliverMessage.class, akka.persistence.serialization.MessageFormats.DeliverMessage.Builder.class); + } + + // Construct using akka.persistence.serialization.MessageFormats.DeliverMessage.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getPersistentFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (persistentBuilder_ == null) { + persistent_ = akka.persistence.serialization.MessageFormats.PersistentMessage.getDefaultInstance(); + } else { + persistentBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + destination_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + resolve_ = akka.persistence.serialization.MessageFormats.DeliverMessage.ResolveStrategy.Off; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return akka.persistence.serialization.MessageFormats.internal_static_DeliverMessage_descriptor; + } + + public akka.persistence.serialization.MessageFormats.DeliverMessage getDefaultInstanceForType() { + return akka.persistence.serialization.MessageFormats.DeliverMessage.getDefaultInstance(); + } + + public akka.persistence.serialization.MessageFormats.DeliverMessage build() { + akka.persistence.serialization.MessageFormats.DeliverMessage result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public akka.persistence.serialization.MessageFormats.DeliverMessage buildPartial() { + akka.persistence.serialization.MessageFormats.DeliverMessage result = new akka.persistence.serialization.MessageFormats.DeliverMessage(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (persistentBuilder_ == null) { + result.persistent_ = persistent_; + } else { + result.persistent_ = persistentBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.destination_ = destination_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.resolve_ = resolve_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof akka.persistence.serialization.MessageFormats.DeliverMessage) { + return mergeFrom((akka.persistence.serialization.MessageFormats.DeliverMessage)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(akka.persistence.serialization.MessageFormats.DeliverMessage other) { + if (other == akka.persistence.serialization.MessageFormats.DeliverMessage.getDefaultInstance()) return this; + if (other.hasPersistent()) { + mergePersistent(other.getPersistent()); + } + if (other.hasDestination()) { + bitField0_ |= 0x00000002; + destination_ = other.destination_; + onChanged(); + } + if (other.hasResolve()) { + setResolve(other.getResolve()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (hasPersistent()) { + if (!getPersistent().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + 
throws java.io.IOException { + akka.persistence.serialization.MessageFormats.DeliverMessage parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (akka.persistence.serialization.MessageFormats.DeliverMessage) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // optional .PersistentMessage persistent = 1; + private akka.persistence.serialization.MessageFormats.PersistentMessage persistent_ = akka.persistence.serialization.MessageFormats.PersistentMessage.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + akka.persistence.serialization.MessageFormats.PersistentMessage, akka.persistence.serialization.MessageFormats.PersistentMessage.Builder, akka.persistence.serialization.MessageFormats.PersistentMessageOrBuilder> persistentBuilder_; + /** + * optional .PersistentMessage persistent = 1; + */ + public boolean hasPersistent() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * optional .PersistentMessage persistent = 1; + */ + public akka.persistence.serialization.MessageFormats.PersistentMessage getPersistent() { + if (persistentBuilder_ == null) { + return persistent_; + } else { + return persistentBuilder_.getMessage(); + } + } + /** + * optional .PersistentMessage persistent = 1; + */ + public Builder setPersistent(akka.persistence.serialization.MessageFormats.PersistentMessage value) { + if (persistentBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + persistent_ = value; + onChanged(); + } else { + persistentBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * optional .PersistentMessage persistent = 1; + */ + public Builder setPersistent( + akka.persistence.serialization.MessageFormats.PersistentMessage.Builder builderForValue) { + if (persistentBuilder_ == null) { + persistent_ = builderForValue.build(); + onChanged(); + } else { + persistentBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * optional .PersistentMessage persistent = 1; + */ + public Builder mergePersistent(akka.persistence.serialization.MessageFormats.PersistentMessage value) { + if (persistentBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + persistent_ != akka.persistence.serialization.MessageFormats.PersistentMessage.getDefaultInstance()) { + persistent_ = + akka.persistence.serialization.MessageFormats.PersistentMessage.newBuilder(persistent_).mergeFrom(value).buildPartial(); + } else { + persistent_ = value; + } + onChanged(); + } else { + persistentBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * optional .PersistentMessage persistent = 1; + */ + public Builder clearPersistent() { + if (persistentBuilder_ == null) { + persistent_ = akka.persistence.serialization.MessageFormats.PersistentMessage.getDefaultInstance(); + onChanged(); + } else { + persistentBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + /** + * optional .PersistentMessage persistent = 1; + */ + public akka.persistence.serialization.MessageFormats.PersistentMessage.Builder getPersistentBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getPersistentFieldBuilder().getBuilder(); + } + /** + * optional .PersistentMessage persistent = 1; + 
*/ + public akka.persistence.serialization.MessageFormats.PersistentMessageOrBuilder getPersistentOrBuilder() { + if (persistentBuilder_ != null) { + return persistentBuilder_.getMessageOrBuilder(); + } else { + return persistent_; + } + } + /** + * optional .PersistentMessage persistent = 1; + */ + private com.google.protobuf.SingleFieldBuilder< + akka.persistence.serialization.MessageFormats.PersistentMessage, akka.persistence.serialization.MessageFormats.PersistentMessage.Builder, akka.persistence.serialization.MessageFormats.PersistentMessageOrBuilder> + getPersistentFieldBuilder() { + if (persistentBuilder_ == null) { + persistentBuilder_ = new com.google.protobuf.SingleFieldBuilder< + akka.persistence.serialization.MessageFormats.PersistentMessage, akka.persistence.serialization.MessageFormats.PersistentMessage.Builder, akka.persistence.serialization.MessageFormats.PersistentMessageOrBuilder>( + persistent_, + getParentForChildren(), + isClean()); + persistent_ = null; + } + return persistentBuilder_; + } + + // optional string destination = 2; + private java.lang.Object destination_ = ""; + /** + * optional string destination = 2; + */ + public boolean hasDestination() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional string destination = 2; + */ + public java.lang.String getDestination() { + java.lang.Object ref = destination_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + destination_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string destination = 2; + */ + public com.google.protobuf.ByteString + getDestinationBytes() { + java.lang.Object ref = destination_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + destination_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string destination = 2; + */ + public Builder setDestination( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + destination_ = value; + onChanged(); + return this; + } + /** + * optional string destination = 2; + */ + public Builder clearDestination() { + bitField0_ = (bitField0_ & ~0x00000002); + destination_ = getDefaultInstance().getDestination(); + onChanged(); + return this; + } + /** + * optional string destination = 2; + */ + public Builder setDestinationBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + destination_ = value; + onChanged(); + return this; + } + + // optional .DeliverMessage.ResolveStrategy resolve = 3; + private akka.persistence.serialization.MessageFormats.DeliverMessage.ResolveStrategy resolve_ = akka.persistence.serialization.MessageFormats.DeliverMessage.ResolveStrategy.Off; + /** + * optional .DeliverMessage.ResolveStrategy resolve = 3; + */ + public boolean hasResolve() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional .DeliverMessage.ResolveStrategy resolve = 3; + */ + public akka.persistence.serialization.MessageFormats.DeliverMessage.ResolveStrategy getResolve() { + return resolve_; + } + /** + * optional .DeliverMessage.ResolveStrategy resolve = 3; + */ + public Builder setResolve(akka.persistence.serialization.MessageFormats.DeliverMessage.ResolveStrategy value) { + if (value == null) { + throw new 
NullPointerException(); + } + bitField0_ |= 0x00000004; + resolve_ = value; + onChanged(); + return this; + } + /** + * optional .DeliverMessage.ResolveStrategy resolve = 3; + */ + public Builder clearResolve() { + bitField0_ = (bitField0_ & ~0x00000004); + resolve_ = akka.persistence.serialization.MessageFormats.DeliverMessage.ResolveStrategy.Off; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:DeliverMessage) + } + + static { + defaultInstance = new DeliverMessage(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:DeliverMessage) + } + private static com.google.protobuf.Descriptors.Descriptor internal_static_PersistentMessageBatch_descriptor; private static @@ -3765,6 +4546,11 @@ public final class MessageFormats { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_ConfirmMessage_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_DeliverMessage_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_DeliverMessage_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { @@ -3776,18 +4562,23 @@ public final class MessageFormats { java.lang.String[] descriptorData = { "\n\024MessageFormats.proto\";\n\026PersistentMess" + "ageBatch\022!\n\005batch\030\001 \003(\0132\022.PersistentMess" + - "age\"\371\001\n\021PersistentMessage\022#\n\007payload\030\001 \001" + + "age\"\373\001\n\021PersistentMessage\022#\n\007payload\030\001 \001" + "(\0132\022.PersistentPayload\022\022\n\nsequenceNr\030\002 \001" + - "(\003\022\023\n\013processorId\030\003 \001(\t\022\021\n\tchannelId\030\004 \001" + - "(\t\022\017\n\007deleted\030\005 \001(\010\022\020\n\010resolved\030\006 \001(\010\022\020\n" + - "\010confirms\030\010 \003(\t\022\'\n\016confirmMessage\030\n \001(\0132" + - "\017.ConfirmMessage\022\025\n\rconfirmTarget\030\t \001(\t\022" + - "\016\n\006sender\030\007 \001(\t\"S\n\021PersistentPayload\022\024\n\014" + - "serializerId\030\001 \002(\005\022\017\n\007payload\030\002 \002(\014\022\027\n\017p", - "ayloadManifest\030\003 \001(\014\"L\n\016ConfirmMessage\022\023" + - "\n\013processorId\030\001 \001(\t\022\022\n\nsequenceNr\030\002 \001(\003\022" + - "\021\n\tchannelId\030\003 \001(\tB\"\n\036akka.persistence.s" + - "erializationH\001" + "(\003\022\023\n\013processorId\030\003 \001(\t\022\017\n\007deleted\030\005 \001(\010" + + "\022\020\n\010resolved\030\006 \001(\010\022\020\n\010confirms\030\010 \003(\t\022\023\n\013" + + "confirmable\030\013 \001(\010\022\'\n\016confirmMessage\030\n \001(" + + "\0132\017.ConfirmMessage\022\025\n\rconfirmTarget\030\t \001(" + + "\t\022\016\n\006sender\030\007 \001(\t\"S\n\021PersistentPayload\022\024" + + "\n\014serializerId\030\001 \002(\005\022\017\n\007payload\030\002 \002(\014\022\027\n", + "\017payloadManifest\030\003 \001(\014\"L\n\016ConfirmMessage" + + "\022\023\n\013processorId\030\001 \001(\t\022\022\n\nsequenceNr\030\002 \001(" + + "\003\022\021\n\tchannelId\030\003 \001(\t\"\270\001\n\016DeliverMessage\022" + + "&\n\npersistent\030\001 \001(\0132\022.PersistentMessage\022" + + "\023\n\013destination\030\002 \001(\t\0220\n\007resolve\030\003 \001(\0162\037." 
+ + "DeliverMessage.ResolveStrategy\"7\n\017Resolv" + + "eStrategy\022\007\n\003Off\020\001\022\n\n\006Sender\020\002\022\017\n\013Destin" + + "ation\020\003B\"\n\036akka.persistence.serializatio" + + "nH\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -3805,7 +4596,7 @@ public final class MessageFormats { internal_static_PersistentMessage_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_PersistentMessage_descriptor, - new java.lang.String[] { "Payload", "SequenceNr", "ProcessorId", "ChannelId", "Deleted", "Resolved", "Confirms", "ConfirmMessage", "ConfirmTarget", "Sender", }); + new java.lang.String[] { "Payload", "SequenceNr", "ProcessorId", "Deleted", "Resolved", "Confirms", "Confirmable", "ConfirmMessage", "ConfirmTarget", "Sender", }); internal_static_PersistentPayload_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_PersistentPayload_fieldAccessorTable = new @@ -3818,6 +4609,12 @@ public final class MessageFormats { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ConfirmMessage_descriptor, new java.lang.String[] { "ProcessorId", "SequenceNr", "ChannelId", }); + internal_static_DeliverMessage_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_DeliverMessage_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_DeliverMessage_descriptor, + new java.lang.String[] { "Persistent", "Destination", "Resolve", }); return null; } }; diff --git a/akka-persistence/src/main/java/akka/persistence/snapshot/japi/SnapshotStorePlugin.java b/akka-persistence/src/main/java/akka/persistence/snapshot/japi/SnapshotStorePlugin.java index b7623ab79b..9b844c90df 100644 --- a/akka-persistence/src/main/java/akka/persistence/snapshot/japi/SnapshotStorePlugin.java +++ b/akka-persistence/src/main/java/akka/persistence/snapshot/japi/SnapshotStorePlugin.java @@ -12,9 +12,7 @@ import akka.persistence.*; interface SnapshotStorePlugin { //#snapshot-store-plugin-api /** - * Plugin Java API. - * - * Asynchronously loads a snapshot. + * Java API, Plugin API: asynchronously loads a snapshot. * * @param processorId processor id. * @param criteria selection criteria for loading. @@ -22,9 +20,7 @@ interface SnapshotStorePlugin { Future> doLoadAsync(String processorId, SnapshotSelectionCriteria criteria); /** - * Plugin Java API. - * - * Asynchronously saves a snapshot. + * Java API, Plugin API: asynchronously saves a snapshot. * * @param metadata snapshot metadata. * @param snapshot snapshot. @@ -32,18 +28,14 @@ interface SnapshotStorePlugin { Future doSaveAsync(SnapshotMetadata metadata, Object snapshot); /** - * Plugin Java API. - * - * Called after successful saving of a snapshot. + * Java API, Plugin API: called after successful saving of a snapshot. * * @param metadata snapshot metadata. */ void onSaved(SnapshotMetadata metadata) throws Exception; /** - * Plugin Java API. - * - * Deletes the snapshot identified by `metadata`. + * Java API, Plugin API: deletes the snapshot identified by `metadata`. * * @param metadata snapshot metadata. 
*/ diff --git a/akka-persistence/src/main/protobuf/MessageFormats.proto b/akka-persistence/src/main/protobuf/MessageFormats.proto index 4b31a4e459..986a65a80b 100644 --- a/akka-persistence/src/main/protobuf/MessageFormats.proto +++ b/akka-persistence/src/main/protobuf/MessageFormats.proto @@ -13,10 +13,10 @@ message PersistentMessage { optional PersistentPayload payload = 1; optional int64 sequenceNr = 2; optional string processorId = 3; - optional string channelId = 4; optional bool deleted = 5; optional bool resolved = 6; repeated string confirms = 8; + optional bool confirmable = 11; optional ConfirmMessage confirmMessage = 10; optional string confirmTarget = 9; optional string sender = 7; @@ -33,3 +33,15 @@ message ConfirmMessage { optional int64 sequenceNr = 2; optional string channelId = 3; } + +message DeliverMessage { + enum ResolveStrategy { + Off = 1; + Sender = 2; + Destination = 3; + } + + optional PersistentMessage persistent = 1; + optional string destination = 2; + optional ResolveStrategy resolve = 3; +} \ No newline at end of file diff --git a/akka-persistence/src/main/resources/reference.conf b/akka-persistence/src/main/resources/reference.conf index 633b6ff2da..4360be4ec7 100644 --- a/akka-persistence/src/main/resources/reference.conf +++ b/akka-persistence/src/main/resources/reference.conf @@ -18,9 +18,7 @@ akka { serialization-bindings { "akka.persistence.serialization.Snapshot" = akka-persistence-snapshot - "akka.persistence.PersistentBatch" = akka-persistence-message - "akka.persistence.PersistentImpl" = akka-persistence-message - "akka.persistence.Confirm" = akka-persistence-message + "akka.persistence.serialization.Message" = akka-persistence-message } } diff --git a/akka-persistence/src/main/scala/akka/persistence/Channel.scala b/akka-persistence/src/main/scala/akka/persistence/Channel.scala index 68de29cce8..1230fee7ff 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Channel.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Channel.scala @@ -4,13 +4,15 @@ package akka.persistence +import akka.AkkaException import akka.actor._ +import akka.persistence.serialization.Message + /** - * A channel is used by [[Processor]]s for sending received persistent messages to destinations. - * It prevents redundant delivery of messages to these destinations when a processor is recovered - * i.e. receives replayed messages. This requires that channel destinations confirm the receipt of - * persistent messages by calling `confirm()` on the [[Persistent]] message. + * A channel is used by [[Processor]]s for sending [[Persistent]] messages to destinations. The main + * responsibility of a channel is to prevent redundant delivery of replayed messages to destinations + * when a processor is recovered. * * A channel can be instructed to deliver a persistent message to a `destination` via the [[Deliver]] * command. @@ -45,56 +47,69 @@ import akka.actor._ * } * }}} * + * Redundant delivery of messages to destinations is only prevented if the receipt of these messages + * is explicitly confirmed. Therefore, persistent messages that are delivered via a channel are of type + * [[ConfirmablePersistent]]. Their receipt can be confirmed by a destination by calling the `confirm()` + * method on these messages. 
+ * + * {{{ + * class MyDestination extends Actor { + * def receive = { + * case cp @ ConfirmablePersistent(payload, sequenceNr) => cp.confirm() + * } + * } + * }}} + * + * A channel will only re-deliver messages if the sending processor is recovered and delivery of these + * messages has not been confirmed yet. Hence, a channel can be used to avoid message loss in case of + * sender JVM crashes, for example. A channel, however, does not attempt any re-deliveries should a + * destination be unavailable. Re-delivery to destinations (in case of network failures or destination + * JVM crashes) is an application-level concern and can be done by using a reliable proxy, for example. + * * @see [[Deliver]] */ -class Channel private (_channelId: Option[String]) extends Actor with Stash { +sealed class Channel private[akka] (_channelId: Option[String]) extends Actor with Stash { private val extension = Persistence(context.system) private val id = _channelId match { case Some(cid) ⇒ cid case None ⇒ extension.channelId(self) } - /** - * Creates a new channel with a generated channel id. - */ - def this() = this(None) - - /** - * Creates a new channel with specified channel id. - * - * @param channelId channel id. - */ - def this(channelId: String) = this(Some(channelId)) - import ResolvedDelivery._ private val delivering: Actor.Receive = { - case Deliver(persistent: PersistentImpl, destination, resolve) ⇒ { + case Deliver(persistent: PersistentRepr, destination, resolve) ⇒ { if (!persistent.confirms.contains(id)) { - val msg = persistent.copy(channelId = id, - confirmTarget = extension.journalFor(persistent.processorId), - confirmMessage = Confirm(persistent.processorId, persistent.sequenceNr, id)) + val prepared = prepareDelivery(persistent) resolve match { - case Resolve.Sender if !persistent.resolved ⇒ { - context.actorOf(Props(classOf[ResolvedSenderDelivery], msg, destination, sender)) ! DeliverResolved + case Resolve.Sender if !prepared.resolved ⇒ { + context.actorOf(Props(classOf[ResolvedSenderDelivery], prepared, destination, sender)) ! DeliverResolved context.become(buffering, false) } - case Resolve.Destination if !persistent.resolved ⇒ { - context.actorOf(Props(classOf[ResolvedDestinationDelivery], msg, destination, sender)) ! DeliverResolved + case Resolve.Destination if !prepared.resolved ⇒ { + context.actorOf(Props(classOf[ResolvedDestinationDelivery], prepared, destination, sender)) ! DeliverResolved context.become(buffering, false) } - case _ ⇒ destination tell (msg, sender) + case _ ⇒ destination tell (prepared, sender) } } + unstash() } } private val buffering: Actor.Receive = { - case DeliveredResolved | DeliveredUnresolved ⇒ { context.unbecome(); unstashAll() } // TODO: optimize + case DeliveredResolved | DeliveredUnresolved ⇒ { context.unbecome(); unstash() } case _: Deliver ⇒ stash() } def receive = delivering + + private[akka] def prepareDelivery(persistent: PersistentRepr): PersistentRepr = { + ConfirmablePersistentImpl( + persistent = persistent, + confirmTarget = extension.journalFor(persistent.processorId), + confirmMessage = Confirm(persistent.processorId, persistent.sequenceNr, id)) + } } object Channel { @@ -102,7 +117,7 @@ object Channel { * Returns a channel configuration object for creating a [[Channel]] with a * generated id. 
*/ - def props(): Props = Props(classOf[Channel]) + def props(): Props = Props(classOf[Channel], None) /** * Returns a channel configuration object for creating a [[Channel]] with the @@ -110,12 +125,159 @@ object Channel { * * @param channelId channel id. */ - def props(channelId: String): Props = Props(classOf[Channel], channelId) + def props(channelId: String): Props = Props(classOf[Channel], Some(channelId)) } /** - * Instructs a [[Channel]] to deliver `persistent` message to destination `destination`. - * The `resolve` parameter can be: + * A [[PersistentChannel]] implements the same functionality as a [[Channel]] but additionally + * persists messages before they are delivered. Therefore, the main use case of a persistent + * channel is standalone usage i.e. independent of a sending [[Processor]]. Messages that have + * been persisted by a persistent channel are deleted again when destinations confirm the receipt + * of these messages. + * + * Using a persistent channel in combination with a [[Processor]] can make sense if destinations + * are unavailable for a long time and an application doesn't want to buffer all messages in + * memory (but write them to a journal instead). In this case, delivery can be disabled with + * [[DisableDelivery]] (to stop delivery and persist-only) and re-enabled with [[EnableDelivery]]. + * + * A persistent channel can also be configured to reply whether persisting a message was successful + * or not (see `PersistentChannel.props` methods). If enabled, the sender will receive the persisted + * message as reply (i.e. a [[Persistent]] message), otherwise a [[PersistenceFailure]] message. + * + * A persistent channel will only re-deliver un-confirmed, stored messages if it is started or re- + * enabled with [[EnableDelivery]]. Hence, a persistent channel can be used to avoid message loss + * in case of sender JVM crashes, for example. A channel, however, does not attempt any re-deliveries + * should a destination be unavailable. Re-delivery to destinations (in case of network failures or + * destination JVM crashes) is an application-level concern and can be done by using a reliable proxy, + * for example. + */ +final class PersistentChannel private[akka] (_channelId: Option[String], persistentReply: Boolean) extends EventsourcedProcessor { + override val processorId = _channelId.getOrElse(super.processorId) + + private val journal = Persistence(context.system).journalFor(processorId) + private val channel = context.actorOf(Props(classOf[NoPrepChannel], processorId)) + + private var deliveryEnabled = true + + def receiveReplay: Receive = { + case Deliver(persistent: PersistentRepr, destination, resolve) ⇒ deliver(prepareDelivery(persistent), destination, resolve) + } + + def receiveCommand: Receive = { + case d @ Deliver(persistent: PersistentRepr, destination, resolve) ⇒ { + if (!persistent.confirms.contains(processorId)) { + persist(d) { _ ⇒ + val prepared = prepareDelivery(persistent) + + if (persistent.processorId != PersistentRepr.Undefined) + journal ! Confirm(persistent.processorId, persistent.sequenceNr, processorId) + + if (persistentReply) + sender ! prepared + + if (deliveryEnabled) + deliver(prepared, destination, resolve) + } + } + } + case c: Confirm ⇒ deleteMessage(c.sequenceNr, true) + case DisableDelivery ⇒ deliveryEnabled = false + case EnableDelivery if (!deliveryEnabled) ⇒ throw new ChannelRestartRequiredException + case p: PersistenceFailure if (persistentReply) ⇒ sender ! 
p + } + + private def prepareDelivery(persistent: PersistentRepr): PersistentRepr = currentPersistentMessage.map { current ⇒ + val sequenceNr = if (persistent.sequenceNr == 0L) current.sequenceNr else persistent.sequenceNr + val resolved = persistent.resolved && current.asInstanceOf[PersistentRepr].resolved + persistent.update(sequenceNr = sequenceNr, resolved = resolved) + } getOrElse (persistent) + + private def deliver(persistent: PersistentRepr, destination: ActorRef, resolve: Resolve.ResolveStrategy) = currentPersistentMessage.foreach { current ⇒ + channel forward Deliver(persistent = ConfirmablePersistentImpl(persistent, + confirmTarget = self, + confirmMessage = Confirm(processorId, current.sequenceNr, PersistentRepr.Undefined)), destination, resolve) + } +} + +object PersistentChannel { + /** + * Returns a channel configuration object for creating a [[PersistentChannel]] with a + * generated id. The sender will not receive persistence completion replies. + */ + def props(): Props = props(persistentReply = false) + + /** + * Returns a channel configuration object for creating a [[PersistentChannel]] with a + * generated id. + * + * @param persistentReply if `true` the sender will receive the successfully stored + * [[Persistent]] message that has been submitted with a + * [[Deliver]] request, or a [[PersistenceFailure]] message + * in case of a persistence failure. + */ + def props(persistentReply: Boolean): Props = Props(classOf[PersistentChannel], None, persistentReply) + + /** + * Returns a channel configuration object for creating a [[PersistentChannel]] with the + * specified id. The sender will not receive persistence completion replies. + * + * @param channelId channel id. + */ + def props(channelId: String): Props = props(channelId, persistentReply = false) + + /** + * Returns a channel configuration object for creating a [[PersistentChannel]] with the + * specified id. + * + * @param channelId channel id. + * @param persistentReply if `true` the sender will receive the successfully stored + * [[Persistent]] message that has been submitted with a + * [[Deliver]] request, or a [[PersistenceFailure]] message + * in case of a persistence failure. + */ + def props(channelId: String, persistentReply: Boolean): Props = Props(classOf[PersistentChannel], Some(channelId), persistentReply) +} + +/** + * Instructs a [[PersistentChannel]] to disable the delivery of [[Persistent]] messages to their destination. + * The persistent channel, however, continues to persist messages (for later delivery). + * + * @see [[EnableDelivery]] + */ +@SerialVersionUID(1L) +case object DisableDelivery { + /** + * Java API. + */ + def getInstance = this +} + +/** + * Instructs a [[PersistentChannel]] to re-enable the delivery of [[Persistent]] messages to their destination. + * This will first deliver all messages that have been stored by a persistent channel for which no confirmation + * is available yet. New [[Deliver]] requests are processed after all stored messages have been delivered. This + * request only has an effect if a persistent channel has previously been disabled with [[DisableDelivery]]. + * + * @see [[DisableDelivery]] + */ +@SerialVersionUID(1L) +case object EnableDelivery { + /** + * Java API. + */ + def getInstance = this +} + +/** + * Thrown by a persistent channel when [[EnableDelivery]] has been requested and delivery has been previously + * disabled for that channel. 
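// Editor's sketch (not part of this patch): standalone PersistentChannel usage as described
// in the scaladoc above. The actor system setup, channel id, destination actor and payload
// are assumptions made up for illustration only.
import akka.actor._
import akka.persistence._

class ExampleDestination extends Actor {
  def receive = {
    case cp @ ConfirmablePersistent(payload, sequenceNr) =>
      // acknowledge receipt so the persistent channel can delete the stored message
      cp.confirm()
  }
}

object PersistentChannelExample extends App {
  val system = ActorSystem("example")
  // persistentReply = true: the sender receives the stored Persistent message
  // (or a PersistenceFailure) as a reply to each Deliver request
  val channel = system.actorOf(PersistentChannel.props("example-channel", persistentReply = true))
  val destination = system.actorOf(Props[ExampleDestination])

  channel ! Deliver(Persistent("example-payload"), destination)
  channel ! DisableDelivery // keep persisting new Deliver requests without delivering them
  // Re-enabling delivery after DisableDelivery requires a channel restart
  // (the channel throws ChannelRestartRequiredException on EnableDelivery, see above).
}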
+ */ +@SerialVersionUID(1L) +class ChannelRestartRequiredException extends AkkaException("channel restart required for enabling delivery") + +/** + * Instructs a [[Channel]] or [[PersistentChannel]] to deliver `persistent` message to + * destination `destination`. The `resolve` parameter can be: * * - `Resolve.Destination`: will resolve a new destination reference from the specified * `destination`s path. The `persistent` message will be sent to the newly resolved @@ -160,7 +322,7 @@ object Channel { * @param resolve resolve strategy. */ @SerialVersionUID(1L) -case class Deliver(persistent: Persistent, destination: ActorRef, resolve: Resolve.ResolveStrategy = Resolve.Off) +case class Deliver(persistent: Persistent, destination: ActorRef, resolve: Resolve.ResolveStrategy = Resolve.Off) extends Message object Deliver { /** @@ -253,9 +415,9 @@ private object ResolvedDelivery { * Resolves `destination` before sending `persistent` message to the resolved destination using * the specified sender (`sdr`) as message sender. */ -private class ResolvedDestinationDelivery(persistent: PersistentImpl, destination: ActorRef, sdr: ActorRef) extends ResolvedDelivery { +private class ResolvedDestinationDelivery(persistent: PersistentRepr, destination: ActorRef, sdr: ActorRef) extends ResolvedDelivery { val path = destination.path - def onResolveSuccess(ref: ActorRef) = ref tell (persistent.copy(resolved = true), sdr) + def onResolveSuccess(ref: ActorRef) = ref tell (persistent.update(resolved = true), sdr) def onResolveFailure() = destination tell (persistent, sdr) } @@ -263,9 +425,15 @@ private class ResolvedDestinationDelivery(persistent: PersistentImpl, destinatio * Resolves `sdr` before sending `persistent` message to specified `destination` using * the resolved sender as message sender. */ -private class ResolvedSenderDelivery(persistent: PersistentImpl, destination: ActorRef, sdr: ActorRef) extends ResolvedDelivery { +private class ResolvedSenderDelivery(persistent: PersistentRepr, destination: ActorRef, sdr: ActorRef) extends ResolvedDelivery { val path = sdr.path - def onResolveSuccess(ref: ActorRef) = destination tell (persistent.copy(resolved = true), ref) + def onResolveSuccess(ref: ActorRef) = destination tell (persistent.update(resolved = true), ref) def onResolveFailure() = destination tell (persistent, sdr) } +/** + * [[Channel]] specialization used by [[PersistentChannel]] to deliver stored messages. 
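// Editor's sketch (not part of this patch): using the `resolve` parameter of Deliver as
// described in the scaladoc above. The channel, destination and message references are
// assumptions made up for illustration.
object ResolveStrategyExample {
  import akka.actor.ActorRef
  import akka.persistence._

  // A recovered processor typically forwards replayed messages with Resolve.Sender so that
  // the channel re-resolves the (possibly stale) sender reference before delivery.
  def forwardReplayed(channel: ActorRef, destination: ActorRef, p: Persistent): Unit =
    channel ! Deliver(p, destination, Resolve.Sender)
}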
+ */ +private class NoPrepChannel(channelId: String) extends Channel(Some(channelId)) { + override private[akka] def prepareDelivery(persistent: PersistentRepr) = persistent +} diff --git a/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala b/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala index e645b47ba7..d74d51ffce 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala @@ -64,11 +64,11 @@ private[persistence] trait Eventsourced extends Processor { private val persistingEvents: State = new State { def aroundReceive(receive: Receive, message: Any) = message match { case PersistentBatch(b) ⇒ { - b.foreach(deleteMessage) + b.foreach(p ⇒ deleteMessage(p, true)) throw new UnsupportedOperationException("Persistent command batches not supported") } - case p: PersistentImpl ⇒ { - deleteMessage(p) + case p: PersistentRepr ⇒ { + deleteMessage(p, true) throw new UnsupportedOperationException("Persistent commands not supported") } case WriteSuccess(p) if identical(p.payload, persistInvocations.head._1) ⇒ { @@ -95,10 +95,10 @@ private[persistence] trait Eventsourced extends Processor { } private var persistInvocations: List[(Any, Any ⇒ Unit)] = Nil - private var persistentEventBatch: List[PersistentImpl] = Nil + private var persistentEventBatch: List[PersistentRepr] = Nil private var currentState: State = recovering - private val processorStash = createProcessorStash + private val processorStash = createStash() /** * Asynchronously persists `event`. On successful persistence, `handler` is called with the @@ -124,12 +124,13 @@ private[persistence] trait Eventsourced extends Processor { */ final def persist[A](event: A)(handler: A ⇒ Unit): Unit = { persistInvocations = (event, handler.asInstanceOf[Any ⇒ Unit]) :: persistInvocations - persistentEventBatch = PersistentImpl(event) :: persistentEventBatch + persistentEventBatch = PersistentRepr(event) :: persistentEventBatch } /** * Asynchronously persists `events` in specified order. This is equivalent to calling - * `persist[A](event: A)(handler: A => Unit)` multiple times with the same `handler`. + * `persist[A](event: A)(handler: A => Unit)` multiple times with the same `handler`, + * except that `events` are persisted atomically with this method. * * @param events events to be persisted. * @param handler handler for each persisted `events` @@ -211,9 +212,7 @@ trait EventsourcedProcessor extends Processor with Eventsourced { } /** - * Java API. - * - * An event sourced processor. + * Java API: an event sourced processor. */ abstract class UntypedEventsourcedProcessor extends UntypedProcessor with Eventsourced { final def onReceive(message: Any) = initialBehavior(message) @@ -227,9 +226,7 @@ abstract class UntypedEventsourcedProcessor extends UntypedProcessor with Events } /** - * Java API. - * - * Asynchronously persists `event`. On successful persistence, `handler` is called with the + * Java API: asynchronously persists `event`. On successful persistence, `handler` is called with the * persisted event. It is guaranteed that no new commands will be received by a processor * between a call to `persist` and the execution of its `handler`. This also holds for * multiple `persist` calls per received command. Internally, this is achieved by stashing new @@ -254,10 +251,9 @@ abstract class UntypedEventsourcedProcessor extends UntypedProcessor with Events persist(event)(event ⇒ handler(event)) /** - * Java API. 
- * - * Asynchronously persists `events` in specified order. This is equivalent to calling - * `persist[A](event: A, handler: Procedure[A])` multiple times with the same `handler`. + * Java API: asynchronously persists `events` in specified order. This is equivalent to calling + * `persist[A](event: A, handler: Procedure[A])` multiple times with the same `handler`, + * except that `events` are persisted atomically with this method. * * @param events events to be persisted. * @param handler handler for each persisted `events` @@ -266,9 +262,7 @@ abstract class UntypedEventsourcedProcessor extends UntypedProcessor with Events persist(Util.immutableSeq(events))(event ⇒ handler(event)) /** - * Java API. - * - * Replay handler that receives persisted events during recovery. If a state snapshot + * Java API: replay handler that receives persisted events during recovery. If a state snapshot * has been captured and saved, this handler will receive a [[SnapshotOffer]] message * followed by events that are younger than the offered snapshot. * @@ -281,9 +275,7 @@ abstract class UntypedEventsourcedProcessor extends UntypedProcessor with Events def onReceiveReplay(msg: Any): Unit /** - * Java API. - * - * Command handler. Typically validates commands against current state (and/or by + * Java API: command handler. Typically validates commands against current state (and/or by * communication with other actors). On successful validation, one or more events are * derived from a command and these events are then persisted by calling `persist`. * Commands sent to event sourced processors must not be [[Persistent]] or @@ -291,4 +283,4 @@ abstract class UntypedEventsourcedProcessor extends UntypedProcessor with Events * thrown by the processor. */ def onReceiveCommand(msg: Any): Unit -} \ No newline at end of file +} diff --git a/akka-persistence/src/main/scala/akka/persistence/JournalProtocol.scala b/akka-persistence/src/main/scala/akka/persistence/JournalProtocol.scala index 81b7e21470..014b10d67d 100644 --- a/akka-persistence/src/main/scala/akka/persistence/JournalProtocol.scala +++ b/akka-persistence/src/main/scala/akka/persistence/JournalProtocol.scala @@ -13,12 +13,12 @@ import akka.actor._ */ private[persistence] object JournalProtocol { /** - * Instructs a journal to mark the `persistent` message as deleted. - * A persistent message marked as deleted is not replayed during recovery. - * - * @param persistent persistent message. + * Instructs a journal to delete a persistent message identified by `processorId` + * and `sequenceNr`. If `physical` is set to `false`, the persistent message is + * marked as deleted in the journal, otherwise it is physically deleted from the + * journal. */ - case class Delete(persistent: Persistent) + case class Delete(processorId: String, sequenceNr: Long, physical: Boolean) /** * Instructs a journal to persist a sequence of messages. @@ -26,7 +26,7 @@ private[persistence] object JournalProtocol { * @param persistentBatch batch of messages to be persisted. * @param processor requesting processor. */ - case class WriteBatch(persistentBatch: immutable.Seq[PersistentImpl], processor: ActorRef) + case class WriteBatch(persistentBatch: immutable.Seq[PersistentRepr], processor: ActorRef) /** * Instructs a journal to persist a message. @@ -34,14 +34,14 @@ private[persistence] object JournalProtocol { * @param persistent message to be persisted. * @param processor requesting processor. 
*/ - case class Write(persistent: PersistentImpl, processor: ActorRef) + case class Write(persistent: PersistentRepr, processor: ActorRef) /** * Reply message to a processor that `persistent` message has been successfully journaled. * * @param persistent persistent message. */ - case class WriteSuccess(persistent: PersistentImpl) + case class WriteSuccess(persistent: PersistentRepr) /** * Reply message to a processor that `persistent` message could not be journaled. @@ -49,7 +49,7 @@ private[persistence] object JournalProtocol { * @param persistent persistent message. * @param cause failure cause. */ - case class WriteFailure(persistent: PersistentImpl, cause: Throwable) + case class WriteFailure(persistent: PersistentRepr, cause: Throwable) /** * Instructs a journal to loop a `message` back to `processor`, without persisting the @@ -83,7 +83,7 @@ private[persistence] object JournalProtocol { * * @param persistent persistent message. */ - case class Replayed(persistent: PersistentImpl) + case class Replayed(persistent: PersistentRepr) /** * Reply message to a processor that all `persistent` messages have been replayed. diff --git a/akka-persistence/src/main/scala/akka/persistence/Persistent.scala b/akka-persistence/src/main/scala/akka/persistence/Persistent.scala index 075a2777a0..474b2bd3d2 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Persistent.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Persistent.scala @@ -12,6 +12,8 @@ import scala.collection.immutable import akka.actor.ActorRef import akka.japi.Util.immutableSeq +import akka.persistence.serialization.Message + /** * Persistent message. */ @@ -34,18 +36,11 @@ sealed abstract class Persistent { * Creates a new persistent message with the specified `payload`. */ def withPayload(payload: Any): Persistent - - /** - * Called by [[Channel]] destinations to confirm the receipt of a persistent message. - */ - def confirm(): Unit } object Persistent { /** - * Java API. - * - * Creates a new persistent message. Must only be used outside processors. + * Java API: creates a new persistent message. Must only be used outside processors. * * @param payload payload of new persistent message. */ @@ -53,9 +48,7 @@ object Persistent { create(payload, null) /** - * Java API. - * - * Creates a new persistent message, derived from the specified current message. The current + * Java API: creates a new persistent message, derived from the specified current message. The current * message can be obtained inside a [[Processor]] by calling `getCurrentPersistentMessage()`. * * @param payload payload of new persistent message. @@ -73,68 +66,212 @@ object Persistent { * @param currentPersistentMessage optional current persistent message, defaults to `None`. */ def apply(payload: Any)(implicit currentPersistentMessage: Option[Persistent] = None): Persistent = - currentPersistentMessage.map(_.withPayload(payload)).getOrElse(PersistentImpl(payload)) + currentPersistentMessage.map(_.withPayload(payload)).getOrElse(PersistentRepr(payload)) /** - * Persistent message extractor. + * [[Persistent]] extractor. */ def unapply(persistent: Persistent): Option[(Any, Long)] = Some((persistent.payload, persistent.sequenceNr)) } +/** + * Persistent message that has been delivered by a [[Channel]] or [[PersistentChannel]]. Channel + * destinations that receive messages of this type can confirm their receipt by calling [[confirm]]. 
+ */ +sealed abstract class ConfirmablePersistent extends Persistent { + /** + * Called by [[Channel]] and [[PersistentChannel]] destinations to confirm the receipt of a + * persistent message. + */ + def confirm(): Unit +} + +object ConfirmablePersistent { + /** + * [[ConfirmablePersistent]] extractor. + */ + def unapply(persistent: ConfirmablePersistent): Option[(Any, Long)] = + Some((persistent.payload, persistent.sequenceNr)) +} + /** * Instructs a [[Processor]] to atomically write the contained [[Persistent]] messages to the * journal. The processor receives the written messages individually as [[Persistent]] messages. * During recovery, they are also replayed individually. */ -case class PersistentBatch(persistentBatch: immutable.Seq[Persistent]) { +case class PersistentBatch(persistentBatch: immutable.Seq[Persistent]) extends Message { /** * INTERNAL API. */ - private[persistence] def persistentImplList: List[PersistentImpl] = - persistentBatch.toList.asInstanceOf[List[PersistentImpl]] + private[persistence] def persistentReprList: List[PersistentRepr] = + persistentBatch.toList.asInstanceOf[List[PersistentRepr]] +} + +/** + * Plugin API: representation of a persistent message in the journal plugin API. + * + * @see [[SyncWriteJournal]] + * @see [[AsyncWriteJournal]] + * @see [[AsyncReplay]] + */ +trait PersistentRepr extends Persistent with Message { + import scala.collection.JavaConverters._ + + /** + * This persistent message's payload. + */ + def payload: Any + + /** + * This persistent message's sequence number. + */ + def sequenceNr: Long + + /** + * Id of the processor that journals the message. + */ + def processorId: String + + /** + * `true` if this message is marked as deleted. + */ + def deleted: Boolean + + /** + * `true` by default, `false` for replayed messages. Set to `true` by a channel if this + * message is replayed and its sender reference was resolved. Channels use this field to + * avoid redundant sender reference resolutions. + */ + def resolved: Boolean + + /** + * Channel ids of delivery confirmations that are available for this message. Only non-empty + * for replayed messages. + */ + def confirms: immutable.Seq[String] + + /** + * Java API, Plugin API: channel ids of delivery confirmations that are available for this + * message. Only non-empty for replayed messages. + */ + def getConfirms: JList[String] = confirms.asJava + + /** + * `true` only if this message has been delivered by a channel. + */ + def confirmable: Boolean + + /** + * Delivery confirmation message. + */ + def confirmMessage: Confirm + + /** + * Delivery confirmation target. + */ + def confirmTarget: ActorRef + + /** + * Sender of this message. + */ + def sender: ActorRef + + private[persistence] def prepareWrite(sender: ActorRef): PersistentRepr + + private[persistence] def update( + sequenceNr: Long = sequenceNr, + processorId: String = processorId, + deleted: Boolean = deleted, + resolved: Boolean = resolved, + confirms: immutable.Seq[String] = confirms, + confirmMessage: Confirm = confirmMessage, + confirmTarget: ActorRef = confirmTarget): PersistentRepr +} + +object PersistentRepr { + /** + * Plugin API: value of an undefined processor or channel id. + */ + val Undefined = "" + + /** + * Plugin API.
+ */ + def apply( + payload: Any, + sequenceNr: Long = 0L, + processorId: String = PersistentRepr.Undefined, + deleted: Boolean = false, + resolved: Boolean = true, + confirms: immutable.Seq[String] = Nil, + confirmable: Boolean = false, + confirmMessage: Confirm = null, + confirmTarget: ActorRef = null, + sender: ActorRef = null) = + if (confirmable) ConfirmablePersistentImpl(payload, sequenceNr, processorId, deleted, resolved, confirms, confirmMessage, confirmTarget, sender) + else PersistentImpl(payload, sequenceNr, processorId, deleted, confirms, sender) + + /** + * Java API, Plugin API. + */ + def create = apply _ } object PersistentBatch { /** - * JAVA API. + * Java API. */ def create(persistentBatch: JIterable[Persistent]) = PersistentBatch(immutableSeq(persistentBatch)) } /** - * Plugin API. - * - * Internal [[Persistent]] message representation. - * - * @param processorId Id of processor that journaled the message. - * @param channelId Id of last channel that delivered the message to a destination. - * @param sender Serialized sender reference. - * @param deleted `true` if this message is marked as deleted. - * @param resolved `true` by default, `false` for replayed messages. Set to `true` by a channel if this - * message is replayed and its sender reference was resolved. Channels use this field to - * avoid redundant sender reference resolutions. - * @param confirms Channel ids of delivery confirmations that are available for this message. Only non-empty - * for replayed messages. - * @param confirmTarget Delivery confirmation target. - * @param confirmMessage Delivery confirmation message. - * - * @see [[Processor]] - * @see [[Channel]] - * @see [[Deliver]] + * INTERNAL API. */ -case class PersistentImpl( +private[persistence] case class PersistentImpl( payload: Any, - sequenceNr: Long = 0L, - processorId: String = PersistentImpl.Undefined, - channelId: String = PersistentImpl.Undefined, - deleted: Boolean = false, - resolved: Boolean = true, - confirms: Seq[String] = Nil, - confirmMessage: Confirm = null, - confirmTarget: ActorRef = null, - sender: ActorRef = null) extends Persistent { + sequenceNr: Long, + processorId: String, + deleted: Boolean, + confirms: immutable.Seq[String], + sender: ActorRef) extends Persistent with PersistentRepr { + + def withPayload(payload: Any): Persistent = + copy(payload = payload) + + def prepareWrite(sender: ActorRef) = + copy(sender = sender) + + def update( + sequenceNr: Long, + processorId: String, + deleted: Boolean, + resolved: Boolean, + confirms: immutable.Seq[String], + confirmMessage: Confirm, + confirmTarget: ActorRef) = + copy(sequenceNr = sequenceNr, processorId = processorId, deleted = deleted, confirms = confirms) + + val resolved: Boolean = false + val confirmable: Boolean = false + val confirmMessage: Confirm = null + val confirmTarget: ActorRef = null +} + +/** + * INTERNAL API. + */ +private[persistence] case class ConfirmablePersistentImpl( + payload: Any, + sequenceNr: Long, + processorId: String, + deleted: Boolean, + resolved: Boolean, + confirms: immutable.Seq[String], + confirmMessage: Confirm, + confirmTarget: ActorRef, + sender: ActorRef) extends ConfirmablePersistent with PersistentRepr { def withPayload(payload: Any): Persistent = copy(payload = payload) @@ -142,41 +279,23 @@ case class PersistentImpl( def confirm(): Unit = if (confirmTarget != null) confirmTarget ! confirmMessage - import scala.collection.JavaConverters._ + def confirmable = true - /** - * Java Plugin API. 
- */ - def getConfirms: JList[String] = confirms.asJava + def prepareWrite(sender: ActorRef) = + copy(sender = sender, resolved = false, confirmMessage = null, confirmTarget = null) - private[persistence] def prepareWrite(sender: ActorRef) = - copy(sender = sender, resolved = false, confirmTarget = null, confirmMessage = null) + def update(sequenceNr: Long, processorId: String, deleted: Boolean, resolved: Boolean, confirms: immutable.Seq[String], confirmMessage: Confirm, confirmTarget: ActorRef) = + copy(sequenceNr = sequenceNr, processorId = processorId, deleted = deleted, resolved = resolved, confirms = confirms, confirmMessage = confirmMessage, confirmTarget = confirmTarget) } -object PersistentImpl { - val Undefined = "" - - /** - * Java Plugin API. - */ - def create(payload: Any, sequenceNr: Long, processorId: String, channelId: String, deleted: Boolean, resolved: Boolean, confirms: Seq[String], confirmMessage: Confirm, confirmTarget: ActorRef, sender: ActorRef): PersistentImpl = - PersistentImpl(payload, sequenceNr, processorId, channelId, deleted, resolved, confirms, confirmMessage, confirmTarget, sender) +private[persistence] object ConfirmablePersistentImpl { + def apply(persistent: PersistentRepr, confirmMessage: Confirm, confirmTarget: ActorRef): ConfirmablePersistentImpl = + ConfirmablePersistentImpl(persistent.payload, persistent.sequenceNr, persistent.processorId, persistent.deleted, persistent.resolved, persistent.confirms, confirmMessage, confirmTarget, persistent.sender) } /** - * Sent to a [[Processor]] when a journal failed to write a [[Persistent]] message. If - * not handled, an `akka.actor.ActorKilledException` is thrown by that processor. + * INTERNAL API. * - * @param payload payload of the persistent message. - * @param sequenceNr sequence number of the persistent message. - * @param cause failure cause. + * Message to confirm the receipt of a [[ConfirmablePersistent]] message. */ -case class PersistenceFailure(payload: Any, sequenceNr: Long, cause: Throwable) - -/** - * Internal API. - * - * Message to confirm the receipt of a persistent message (sent via a [[Channel]]). 
- */ -@SerialVersionUID(1L) -private[persistence] case class Confirm(processorId: String, sequenceNr: Long, channelId: String) +private[persistence] case class Confirm(processorId: String, sequenceNr: Long, channelId: String) extends Message diff --git a/akka-persistence/src/main/scala/akka/persistence/Processor.scala b/akka-persistence/src/main/scala/akka/persistence/Processor.scala index 251ef053c4..460d70e2ee 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Processor.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Processor.scala @@ -4,8 +4,6 @@ package akka.persistence -import scala.collection.immutable - import akka.actor._ import akka.dispatch._ @@ -53,7 +51,7 @@ import akka.dispatch._ * @see [[Recover]] * @see [[PersistentBatch]] */ -trait Processor extends Actor with Stash { +trait Processor extends Actor with Stash with StashFactory { import JournalProtocol._ import SnapshotProtocol._ @@ -155,8 +153,8 @@ trait Processor extends Actor with Stash { } } case LoopSuccess(m) ⇒ process(receive, m) - case p: PersistentImpl ⇒ journal forward Write(p.copy(processorId = processorId, sequenceNr = nextSequenceNr()), self) - case pb: PersistentBatch ⇒ journal forward WriteBatch(pb.persistentImplList.map(_.copy(processorId = processorId, sequenceNr = nextSequenceNr())), self) + case p: PersistentRepr ⇒ journal forward Write(p.update(processorId = processorId, sequenceNr = nextSequenceNr()), self) + case pb: PersistentBatch ⇒ journal forward WriteBatch(pb.persistentReprList.map(_.update(processorId = processorId, sequenceNr = nextSequenceNr())), self) case m ⇒ journal forward Loop(m, self) } } @@ -241,9 +239,50 @@ trait Processor extends Actor with Stash { * recovery. This method is usually called inside `preRestartProcessor` when a persistent message * caused an exception. Processors that want to re-receive that persistent message during recovery * should not call this method. + * + * @param persistent persistent message to be marked as deleted. + * @throws IllegalArgumentException if `persistent` message has not been persisted by this + * processor. */ def deleteMessage(persistent: Persistent): Unit = { - journal ! Delete(persistent) + deleteMessage(persistent, false) + } + + /** + * Deletes a `persistent` message. If `physical` is set to `false` (default), the persistent + * message is marked as deleted in the journal, otherwise it is physically deleted from the + * journal. A deleted message is not replayed during recovery. This method is usually called + * inside `preRestartProcessor` when a persistent message caused an exception. Processors that + * want to re-receive that persistent message during recovery should not call this method. + * + * @param persistent persistent message to be deleted. + * @param physical if `false` (default), the message is marked as deleted, otherwise it is + * physically deleted. + * @throws IllegalArgumentException if `persistent` message has not been persisted by this + * processor. + */ + def deleteMessage(persistent: Persistent, physical: Boolean): Unit = { + val impl = persistent.asInstanceOf[PersistentRepr] + if (impl.processorId != processorId) + throw new IllegalArgumentException( + s"persistent message to be deleted (processor id = [${impl.processorId}], sequence number = [${impl.sequenceNr}]) " + + s"has not been persisted by this processor (processor id = [${processorId}])") + else deleteMessage(impl.sequenceNr, physical) + } + + /** + * Deletes a persistent message identified by `sequenceNr`. 
If `physical` is set to `false`, + * the persistent message is marked as deleted in the journal, otherwise it is physically + * deleted from the journal. A deleted message is not replayed during recovery. This method + * is usually called inside `preRestartProcessor` when a persistent message caused an exception. + * Processors that want to re-receive that persistent message during recovery should not call + * this method. + * + * @param sequenceNr sequence number of the persistent message to be deleted. + * @param physical if `false`, the message is marked as deleted, otherwise it is physically deleted. + */ + def deleteMessage(sequenceNr: Long, physical: Boolean): Unit = { + journal ! Delete(processorId, sequenceNr, physical) } /** @@ -351,71 +390,25 @@ trait Processor extends Actor with Stash { case _ ⇒ true } - private val processorStash = - createProcessorStash + private val processorStash = createStash() private def currentEnvelope: Envelope = context.asInstanceOf[ActorCell].currentMessage - - /** - * INTERNAL API. - */ - private[persistence] def createProcessorStash = new ProcessorStash { - var theStash = Vector.empty[Envelope] - - def stash(): Unit = - theStash :+= currentEnvelope - - def prepend(others: immutable.Seq[Envelope]): Unit = - others.reverseIterator.foreach(env ⇒ theStash = env +: theStash) - - def unstash(): Unit = try { - if (theStash.nonEmpty) { - mailbox.enqueueFirst(self, theStash.head) - theStash = theStash.tail - } - } - - def unstashAll(): Unit = try { - val i = theStash.reverseIterator - while (i.hasNext) mailbox.enqueueFirst(self, i.next()) - } finally { - theStash = Vector.empty[Envelope] - } - } } /** - * INTERNAL API. + * Sent to a [[Processor]] when a journal failed to write a [[Persistent]] message. If + * not handled, an `akka.actor.ActorKilledException` is thrown by that processor. * - * Processor specific stash used internally to avoid interference with user stash. + * @param payload payload of the persistent message. + * @param sequenceNr sequence number of the persistent message. + * @param cause failure cause. */ -private[persistence] trait ProcessorStash { - /** - * Appends the current message to this stash. - */ - def stash() - - /** - * Prepends `others` to this stash. - */ - def prepend(others: immutable.Seq[Envelope]) - - /** - * Unstashes a single message from this stash. - */ - def unstash() - - /** - * Unstashes all messages from this stash. - */ - def unstashAll() -} +case class PersistenceFailure(payload: Any, sequenceNr: Long, cause: Throwable) /** - * Java API. - * - * An actor that persists (journals) messages of type [[Persistent]]. Messages of other types are not persisted. + * Java API: an actor that persists (journals) messages of type [[Persistent]]. Messages of other types + * are not persisted. * * {{{ * import akka.persistence.Persistent; @@ -468,12 +461,8 @@ private[persistence] trait ProcessorStash { * @see [[PersistentBatch]] */ abstract class UntypedProcessor extends UntypedActor with Processor { - /** - * Java API. - * - * Returns the current persistent message or `null` if there is none. + * Java API. returns the current persistent message or `null` if there is none. 
*/ def getCurrentPersistentMessage = currentPersistentMessage.getOrElse(null) } - diff --git a/akka-persistence/src/main/scala/akka/persistence/Snapshot.scala b/akka-persistence/src/main/scala/akka/persistence/Snapshot.scala index 9b1f4713ba..725ae45d7a 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Snapshot.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Snapshot.scala @@ -1,4 +1,5 @@ /** + * Copyright (C) 2009-2013 Typesafe Inc. * Copyright (C) 2012-2013 Eligotech BV. */ @@ -82,9 +83,7 @@ object SnapshotSelectionCriteria { } /** - * Plugin API. - * - * A selected snapshot matching [[SnapshotSelectionCriteria]]. + * Plugin API: a selected snapshot matching [[SnapshotSelectionCriteria]]. * * @param metadata snapshot metadata. * @param snapshot snapshot. diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncReplay.scala b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncReplay.scala index 0bab9f1b6b..ea022b9abb 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncReplay.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncReplay.scala @@ -6,7 +6,7 @@ package akka.persistence.journal import scala.concurrent.Future -import akka.persistence.PersistentImpl +import akka.persistence.PersistentRepr /** * Asynchronous message replay interface. @@ -14,17 +14,15 @@ import akka.persistence.PersistentImpl trait AsyncReplay { //#journal-plugin-api /** - * Plugin API. - * - * Asynchronously replays persistent messages. Implementations replay a message - * by calling `replayCallback`. The returned future must be completed when all - * messages (matching the sequence number bounds) have been replayed. The future - * `Long` value must be the highest stored sequence number in the journal for the - * specified processor. The future must be completed with a failure if any of - * the persistent messages could not be replayed. + * Plugin API: asynchronously replays persistent messages. Implementations replay + * a message by calling `replayCallback`. The returned future must be completed + * when all messages (matching the sequence number bounds) have been replayed. The + * future `Long` value must be the highest stored sequence number in the journal + * for the specified processor. The future must be completed with a failure if any + * of the persistent messages could not be replayed. * * The `replayCallback` must also be called with messages that have been marked - * as deleted. In this case a replayed message's `deleted` field must be set to + * as deleted. In this case a replayed message's `deleted` method must return * `true`. 
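+ *
+ * For example, the LevelDB journal in this patch keeps deletion markers separately from the
+ * stored message and merges them into the replayed message before invoking the callback,
+ * along the lines of:
+ *
+ * {{{
+ * // `msg` is the stored message, `del` the deletion marker looked up for its key
+ * replayCallback(msg.update(deleted = del))
+ * }}}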
* * The channel ids of delivery confirmations that are available for a replayed @@ -39,6 +37,6 @@ trait AsyncReplay { * @see [[AsyncWriteJournal]] * @see [[SyncWriteJournal]] */ - def replayAsync(processorId: String, fromSequenceNr: Long, toSequenceNr: Long)(replayCallback: PersistentImpl ⇒ Unit): Future[Long] + def replayAsync(processorId: String, fromSequenceNr: Long, toSequenceNr: Long)(replayCallback: PersistentRepr ⇒ Unit): Future[Long] //#journal-plugin-api } diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteJournal.scala index b10328b398..d86123b705 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteJournal.scala @@ -1,4 +1,5 @@ /** + * Copyright (C) 2009-2013 Typesafe Inc. * Copyright (C) 2012-2013 Eligotech BV. */ @@ -39,7 +40,7 @@ trait AsyncWriteJournal extends Actor with AsyncReplay { val csdr = sender val cctr = resequencerCounter val psdr = if (sender.isInstanceOf[PromiseActorRef]) context.system.deadLetters else sender - def resequence(f: PersistentImpl ⇒ Any) = persistentBatch.zipWithIndex.foreach { + def resequence(f: PersistentRepr ⇒ Any) = persistentBatch.zipWithIndex.foreach { case (p, i) ⇒ resequencer ! Desequenced(f(p), cctr + i, processor, csdr) } writeBatchAsync(persistentBatch.map(_.prepareWrite(psdr))) onComplete { @@ -66,9 +67,9 @@ trait AsyncWriteJournal extends Actor with AsyncReplay { } context.system.eventStream.publish(c) } - case Delete(persistent: PersistentImpl) ⇒ { - deleteAsync(persistent) onComplete { - case Success(_) ⇒ // TODO: publish success to event stream + case d @ Delete(processorId, sequenceNr, physical) ⇒ { + deleteAsync(processorId, sequenceNr, physical) onComplete { + case Success(_) ⇒ context.system.eventStream.publish(d) case Failure(e) ⇒ // TODO: publish failure to event stream } } @@ -80,31 +81,26 @@ trait AsyncWriteJournal extends Actor with AsyncReplay { //#journal-plugin-api /** - * Plugin API. - * - * Asynchronously writes a `persistent` message to the journal. + * Plugin API: asynchronously writes a `persistent` message to the journal. */ - def writeAsync(persistent: PersistentImpl): Future[Unit] + def writeAsync(persistent: PersistentRepr): Future[Unit] /** - * Plugin API. - * - * Asynchronously writes a batch of persistent messages to the journal. The batch write - * must be atomic i.e. either all persistent messages in the batch are written or none. + * Plugin API: asynchronously writes a batch of persistent messages to the journal. + * The batch write must be atomic i.e. either all persistent messages in the batch + * are written or none. */ - def writeBatchAsync(persistentBatch: immutable.Seq[PersistentImpl]): Future[Unit] + def writeBatchAsync(persistentBatch: immutable.Seq[PersistentRepr]): Future[Unit] /** - * Plugin API. - * - * Asynchronously marks a `persistent` message as deleted. + * Plugin API: asynchronously deletes a persistent message. If `physical` is set to + * `false`, the persistent message is marked as deleted, otherwise it is physically + * deleted. */ - def deleteAsync(persistent: PersistentImpl): Future[Unit] + def deleteAsync(processorId: String, sequenceNr: Long, physical: Boolean): Future[Unit] /** - * Plugin API. - * - * Asynchronously writes a delivery confirmation to the journal. + * Plugin API: asynchronously writes a delivery confirmation to the journal. 
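+ *
+ * A minimal sketch of an implementation against a hypothetical `store` (illustration only,
+ * assuming an implicit `ExecutionContext` is in scope):
+ *
+ * {{{
+ * def confirmAsync(processorId: String, sequenceNr: Long, channelId: String): Future[Unit] =
+ *   Future {
+ *     // record that `channelId` has confirmed delivery of message `sequenceNr`
+ *     store.addConfirmation(processorId, sequenceNr, channelId)
+ *   }
+ * }}}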
*/ def confirmAsync(processorId: String, sequenceNr: Long, channelId: String): Future[Unit] //#journal-plugin-api diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/SyncWriteJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/SyncWriteJournal.scala index c0310e73dd..77ca00ca81 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/SyncWriteJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/SyncWriteJournal.scala @@ -1,4 +1,5 @@ /** + * Copyright (C) 2009-2013 Typesafe Inc. * Copyright (C) 2012-2013 Eligotech BV. */ @@ -48,8 +49,9 @@ trait SyncWriteJournal extends Actor with AsyncReplay { confirm(processorId, sequenceNr, channelId) context.system.eventStream.publish(c) // TODO: turn off by default and allow to turn on by configuration } - case Delete(persistent: PersistentImpl) ⇒ { - delete(persistent) + case d @ Delete(processorId, sequenceNr, physical) ⇒ { + delete(processorId, sequenceNr, physical) + context.system.eventStream.publish(d) // TODO: turn off by default and allow to turn on by configuration } case Loop(message, processor) ⇒ { processor forward LoopSuccess(message) @@ -58,31 +60,26 @@ trait SyncWriteJournal extends Actor with AsyncReplay { //#journal-plugin-api /** - * Plugin API. - * - * Synchronously writes a `persistent` message to the journal. + * Plugin API: synchronously writes a `persistent` message to the journal. */ - def write(persistent: PersistentImpl): Unit + def write(persistent: PersistentRepr): Unit /** - * Plugin API. - * - * Synchronously writes a batch of persistent messages to the journal. The batch write - * must be atomic i.e. either all persistent messages in the batch are written or none. + * Plugin API: synchronously writes a batch of persistent messages to the journal. + * The batch write must be atomic i.e. either all persistent messages in the batch + * are written or none. */ - def writeBatch(persistentBatch: immutable.Seq[PersistentImpl]): Unit + def writeBatch(persistentBatch: immutable.Seq[PersistentRepr]): Unit /** - * Plugin API. - * - * Synchronously marks a `persistent` message as deleted. + * Plugin API: synchronously deletes a persistent message. If `physical` is set to + * `false`, the persistent message is marked as deleted, otherwise it is physically + * deleted. */ - def delete(persistent: PersistentImpl): Unit + def delete(processorId: String, sequenceNr: Long, physical: Boolean): Unit /** - * Plugin API. - * - * Synchronously writes a delivery confirmation to the journal. + * Plugin API: synchronously writes a delivery confirmation to the journal. */ def confirm(processorId: String, sequenceNr: Long, channelId: String): Unit //#journal-plugin-api diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/inmem/InmemJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/inmem/InmemJournal.scala index c6f28e56b8..92317b45ee 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/inmem/InmemJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/inmem/InmemJournal.scala @@ -27,19 +27,19 @@ private[persistence] class InmemJournal extends AsyncWriteJournal { import InmemStore._ - def writeAsync(persistent: PersistentImpl): Future[Unit] = + def writeAsync(persistent: PersistentRepr): Future[Unit] = (store ? 
Write(persistent)).mapTo[Unit] - def writeBatchAsync(persistentBatch: immutable.Seq[PersistentImpl]): Future[Unit] = + def writeBatchAsync(persistentBatch: immutable.Seq[PersistentRepr]): Future[Unit] = (store ? WriteBatch(persistentBatch)).mapTo[Unit] - def deleteAsync(persistent: PersistentImpl): Future[Unit] = - (store ? Delete(persistent)).mapTo[Unit] + def deleteAsync(processorId: String, sequenceNr: Long, physical: Boolean): Future[Unit] = + (store ? Delete(processorId, sequenceNr, physical)).mapTo[Unit] def confirmAsync(processorId: String, sequenceNr: Long, channelId: String): Future[Unit] = (store ? Confirm(processorId, sequenceNr, channelId)).mapTo[Unit] - def replayAsync(processorId: String, fromSequenceNr: Long, toSequenceNr: Long)(replayCallback: (PersistentImpl) ⇒ Unit): Future[Long] = + def replayAsync(processorId: String, fromSequenceNr: Long, toSequenceNr: Long)(replayCallback: (PersistentRepr) ⇒ Unit): Future[Long] = (store ? Replay(processorId, fromSequenceNr, toSequenceNr, replayCallback)).mapTo[Long] } @@ -47,7 +47,7 @@ private[persistence] class InmemStore extends Actor { import InmemStore._ // processor id => persistent message - var messages = Map.empty[String, Vector[PersistentImpl]] + var messages = Map.empty[String, Vector[PersistentRepr]] def receive = { case Write(p) ⇒ @@ -56,11 +56,14 @@ private[persistence] class InmemStore extends Actor { case WriteBatch(pb) ⇒ pb.foreach(add) success() - case Delete(p) ⇒ - update(p.processorId, p.sequenceNr)(_.copy(deleted = true)) + case Delete(pid, snr, false) ⇒ + update(pid, snr)(_.update(deleted = true)) + success() + case Delete(pid, snr, true) ⇒ + delete(pid, snr) success() case Confirm(pid, snr, cid) ⇒ - update(pid, snr)(p ⇒ p.copy(confirms = cid +: p.confirms)) + update(pid, snr)(p ⇒ p.update(confirms = cid +: p.confirms)) success() case Replay(pid, fromSnr, toSnr, callback) ⇒ { for { @@ -76,16 +79,21 @@ private[persistence] class InmemStore extends Actor { private def success(reply: Any = ()) = sender ! 
reply - private def add(p: PersistentImpl) = messages = messages + (messages.get(p.processorId) match { + private def add(p: PersistentRepr) = messages = messages + (messages.get(p.processorId) match { case Some(ms) ⇒ p.processorId -> (ms :+ p) case None ⇒ p.processorId -> Vector(p) }) - private def update(pid: String, snr: Long)(f: PersistentImpl ⇒ PersistentImpl) = messages = messages.get(pid) match { + private def update(pid: String, snr: Long)(f: PersistentRepr ⇒ PersistentRepr) = messages = messages.get(pid) match { case Some(ms) ⇒ messages + (pid -> ms.map(sp ⇒ if (sp.sequenceNr == snr) f(sp) else sp)) case None ⇒ messages } + private def delete(pid: String, snr: Long) = messages = messages.get(pid) match { + case Some(ms) ⇒ messages + (pid -> ms.filterNot(_.sequenceNr == snr)) + case None ⇒ messages + } + private def maxSequenceNr(pid: String): Long = { val snro = for { ms ← messages.get(pid) @@ -96,9 +104,9 @@ private[persistence] class InmemStore extends Actor { } private[persistence] object InmemStore { - case class Write(p: PersistentImpl) - case class WriteBatch(pb: Seq[PersistentImpl]) - case class Delete(p: PersistentImpl) + case class Write(p: PersistentRepr) + case class WriteBatch(pb: Seq[PersistentRepr]) + case class Delete(processorId: String, sequenceNr: Long, physical: Boolean) case class Confirm(processorId: String, sequenceNr: Long, channelId: String) - case class Replay(processorId: String, fromSequenceNr: Long, toSequenceNr: Long, replayCallback: (PersistentImpl) ⇒ Unit) + case class Replay(processorId: String, fromSequenceNr: Long, toSequenceNr: Long, replayCallback: (PersistentRepr) ⇒ Unit) } diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncReplay.scala b/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncReplay.scala index 6ac8157fc6..db99821bd9 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncReplay.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncReplay.scala @@ -11,18 +11,16 @@ import scala.concurrent.Future import akka.actor.Actor import akka.japi.Procedure import akka.persistence.journal.{ AsyncReplay ⇒ SAsyncReplay } -import akka.persistence.PersistentImpl +import akka.persistence.PersistentRepr /** - * Java API. - * - * Asynchronous message replay interface. + * Java API: asynchronous message replay interface. 
*/ abstract class AsyncReplay extends SAsyncReplay with AsyncReplayPlugin { this: Actor ⇒ import context.dispatcher - final def replayAsync(processorId: String, fromSequenceNr: Long, toSequenceNr: Long)(replayCallback: (PersistentImpl) ⇒ Unit) = - doReplayAsync(processorId, fromSequenceNr, toSequenceNr, new Procedure[PersistentImpl] { - def apply(p: PersistentImpl) = replayCallback(p) + final def replayAsync(processorId: String, fromSequenceNr: Long, toSequenceNr: Long)(replayCallback: (PersistentRepr) ⇒ Unit) = + doReplayAsync(processorId, fromSequenceNr, toSequenceNr, new Procedure[PersistentRepr] { + def apply(p: PersistentRepr) = replayCallback(p) }).map(_.longValue) } diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncWriteJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncWriteJournal.scala index d3d557001f..f158c0fbfb 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncWriteJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncWriteJournal.scala @@ -8,24 +8,22 @@ import scala.collection.immutable import scala.collection.JavaConverters._ import akka.persistence.journal.{ AsyncWriteJournal ⇒ SAsyncWriteJournal } -import akka.persistence.PersistentImpl +import akka.persistence.PersistentRepr /** - * Java API. - * - * Abstract journal, optimized for asynchronous, non-blocking writes. + * Java API: abstract journal, optimized for asynchronous, non-blocking writes. */ abstract class AsyncWriteJournal extends AsyncReplay with SAsyncWriteJournal with AsyncWritePlugin { import context.dispatcher - final def writeAsync(persistent: PersistentImpl) = + final def writeAsync(persistent: PersistentRepr) = doWriteAsync(persistent).map(Unit.unbox) - final def writeBatchAsync(persistentBatch: immutable.Seq[PersistentImpl]) = + final def writeBatchAsync(persistentBatch: immutable.Seq[PersistentRepr]) = doWriteBatchAsync(persistentBatch.asJava).map(Unit.unbox) - final def deleteAsync(persistent: PersistentImpl) = - doDeleteAsync(persistent).map(Unit.unbox) + final def deleteAsync(processorId: String, sequenceNr: Long, physical: Boolean) = + doDeleteAsync(processorId, sequenceNr, physical).map(Unit.unbox) final def confirmAsync(processorId: String, sequenceNr: Long, channelId: String) = doConfirmAsync(processorId, sequenceNr, channelId).map(Unit.unbox) diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/japi/SyncWriteJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/japi/SyncWriteJournal.scala index e91af7ce0e..43f91f19d9 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/japi/SyncWriteJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/japi/SyncWriteJournal.scala @@ -8,22 +8,20 @@ import scala.collection.immutable import scala.collection.JavaConverters._ import akka.persistence.journal.{ SyncWriteJournal ⇒ SSyncWriteJournal } -import akka.persistence.PersistentImpl +import akka.persistence.PersistentRepr /** - * Java API. - * - * Abstract journal, optimized for synchronous writes. + * Java API: abstract journal, optimized for synchronous writes. 
*/ abstract class SyncWriteJournal extends AsyncReplay with SSyncWriteJournal with SyncWritePlugin { - final def write(persistent: PersistentImpl) = + final def write(persistent: PersistentRepr) = doWrite(persistent) - final def writeBatch(persistentBatch: immutable.Seq[PersistentImpl]) = + final def writeBatch(persistentBatch: immutable.Seq[PersistentRepr]) = doWriteBatch(persistentBatch.asJava) - final def delete(persistent: PersistentImpl) = - doDelete(persistent) + final def delete(processorId: String, sequenceNr: Long, physical: Boolean) = + doDelete(processorId, sequenceNr, physical) final def confirm(processorId: String, sequenceNr: Long, channelId: String) = doConfirm(processorId, sequenceNr, channelId) diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbJournal.scala index 1eed3cd9df..6e8404df89 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbJournal.scala @@ -1,4 +1,5 @@ /** + * Copyright (C) 2009-2013 Typesafe Inc. * Copyright (C) 2012-2013 Eligotech BV. */ @@ -38,14 +39,18 @@ private[leveldb] class LeveldbJournal extends SyncWriteJournal with LeveldbIdMap import Key._ - def write(persistent: PersistentImpl) = + def write(persistent: PersistentRepr) = withBatch(batch ⇒ addToBatch(persistent, batch)) - def writeBatch(persistentBatch: immutable.Seq[PersistentImpl]) = + def writeBatch(persistentBatch: immutable.Seq[PersistentRepr]) = withBatch(batch ⇒ persistentBatch.foreach(persistent ⇒ addToBatch(persistent, batch))) - def delete(persistent: PersistentImpl) { - leveldb.put(keyToBytes(deletionKey(numericId(persistent.processorId), persistent.sequenceNr)), Array.empty[Byte]) + def delete(processorId: String, sequenceNr: Long, physical: Boolean) { + if (physical) + // TODO: delete confirmations and deletion markers, if any. 
+ leveldb.delete(keyToBytes(Key(numericId(processorId), sequenceNr, 0))) + else + leveldb.put(keyToBytes(deletionKey(numericId(processorId), sequenceNr)), Array.empty[Byte]) } def confirm(processorId: String, sequenceNr: Long, channelId: String) { @@ -55,10 +60,10 @@ private[leveldb] class LeveldbJournal extends SyncWriteJournal with LeveldbIdMap def leveldbSnapshot = leveldbReadOptions.snapshot(leveldb.getSnapshot) def leveldbIterator = leveldb.iterator(leveldbSnapshot) - def persistentToBytes(p: PersistentImpl): Array[Byte] = serialization.serialize(p).get - def persistentFromBytes(a: Array[Byte]): PersistentImpl = serialization.deserialize(a, classOf[PersistentImpl]).get + def persistentToBytes(p: PersistentRepr): Array[Byte] = serialization.serialize(p).get + def persistentFromBytes(a: Array[Byte]): PersistentRepr = serialization.deserialize(a, classOf[PersistentRepr]).get - private def addToBatch(persistent: PersistentImpl, batch: WriteBatch): Unit = { + private def addToBatch(persistent: PersistentRepr, batch: WriteBatch): Unit = { val nid = numericId(persistent.processorId) batch.put(keyToBytes(counterKey(nid)), counterToBytes(persistent.sequenceNr)) batch.put(keyToBytes(Key(nid, persistent.sequenceNr, 0)), persistentToBytes(persistent)) diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbKey.scala b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbKey.scala index b5326aa5dc..9fe342f15c 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbKey.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbKey.scala @@ -1,4 +1,5 @@ /** + * Copyright (C) 2009-2013 Typesafe Inc. * Copyright (C) 2012-2013 Eligotech BV. */ diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbReplay.scala b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbReplay.scala index d76961fac4..849aed4905 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbReplay.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbReplay.scala @@ -1,4 +1,5 @@ /** + * Copyright (C) 2009-2013 Typesafe Inc. * Copyright (C) 2012-2013 Eligotech BV. 
*/ @@ -18,14 +19,14 @@ private[persistence] trait LeveldbReplay extends AsyncReplay { this: LeveldbJour private val replayDispatcherId = context.system.settings.config.getString("akka.persistence.journal.leveldb.replay-dispatcher") private val replayDispatcher = context.system.dispatchers.lookup(replayDispatcherId) - def replayAsync(processorId: String, fromSequenceNr: Long, toSequenceNr: Long)(replayCallback: PersistentImpl ⇒ Unit): Future[Long] = + def replayAsync(processorId: String, fromSequenceNr: Long, toSequenceNr: Long)(replayCallback: PersistentRepr ⇒ Unit): Future[Long] = Future(replay(numericId(processorId), fromSequenceNr: Long, toSequenceNr)(replayCallback))(replayDispatcher) - private def replay(processorId: Int, fromSequenceNr: Long, toSequenceNr: Long)(replayCallback: PersistentImpl ⇒ Unit): Long = { + private def replay(processorId: Int, fromSequenceNr: Long, toSequenceNr: Long)(replayCallback: PersistentRepr ⇒ Unit): Long = { val iter = leveldbIterator @scala.annotation.tailrec - def go(key: Key, replayCallback: PersistentImpl ⇒ Unit) { + def go(key: Key, replayCallback: PersistentRepr ⇒ Unit) { if (iter.hasNext) { val nextEntry = iter.next() val nextKey = keyFromBytes(nextEntry.getKey) @@ -38,7 +39,7 @@ private[persistence] trait LeveldbReplay extends AsyncReplay { this: LeveldbJour val msg = persistentFromBytes(nextEntry.getValue) val del = deletion(nextKey) val cnf = confirms(nextKey, Nil) - replayCallback(msg.copy(confirms = cnf, deleted = del)) + replayCallback(msg.update(confirms = cnf, deleted = del)) go(nextKey, replayCallback) } } diff --git a/akka-persistence/src/main/scala/akka/persistence/package.scala b/akka-persistence/src/main/scala/akka/persistence/package.scala index ebfcc1efb5..39c165cece 100644 --- a/akka-persistence/src/main/scala/akka/persistence/package.scala +++ b/akka-persistence/src/main/scala/akka/persistence/package.scala @@ -1,4 +1,5 @@ /** + * Copyright (C) 2009-2013 Typesafe Inc. * Copyright (C) 2012-2013 Eligotech BV. */ diff --git a/akka-persistence/src/main/scala/akka/persistence/serialization/MessageSerializer.scala b/akka-persistence/src/main/scala/akka/persistence/serialization/MessageSerializer.scala index 0da51c0c47..e841e9b1fb 100644 --- a/akka-persistence/src/main/scala/akka/persistence/serialization/MessageSerializer.scala +++ b/akka-persistence/src/main/scala/akka/persistence/serialization/MessageSerializer.scala @@ -12,43 +12,56 @@ import akka.actor.ExtendedActorSystem import akka.japi.Util.immutableSeq import akka.persistence._ import akka.persistence.serialization.MessageFormats._ +import akka.persistence.serialization.MessageFormats.DeliverMessage.ResolveStrategy import akka.serialization._ /** - * Protobuf serializer for [[Persistent]] and `Confirm` messages. + * Marker trait for all protobuf-serializable messages in `akka.persistence`. + */ +trait Message extends Serializable + +/** + * Protobuf serializer for [[PersistentBatch]], [[PersistentRepr]] and [[Deliver]] messages. 
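+ *
+ * The serializer is typically looked up through the serialization extension rather than
+ * instantiated directly. A round trip can be sketched as follows (illustrative only):
+ *
+ * {{{
+ * import akka.actor.ActorSystem
+ * import akka.persistence.PersistentRepr
+ * import akka.serialization.SerializationExtension
+ *
+ * val system = ActorSystem("example")
+ * val serialization = SerializationExtension(system)
+ *
+ * val persistent = PersistentRepr("event", sequenceNr = 1L, processorId = "processor-1")
+ * val serializer = serialization.findSerializerFor(persistent)
+ *
+ * val bytes = serializer.toBinary(persistent)
+ * val deserialized = serializer.fromBinary(bytes, Some(classOf[PersistentRepr]))
+ * }}}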
*/ class MessageSerializer(val system: ExtendedActorSystem) extends Serializer { - import PersistentImpl.Undefined + import PersistentRepr.Undefined val PersistentBatchClass = classOf[PersistentBatch] - val PersistentClass = classOf[PersistentImpl] + val PersistentReprClass = classOf[PersistentRepr] + val PersistentImplClass = classOf[PersistentImpl] + val ConfirmablePersistentImplClass = classOf[ConfirmablePersistentImpl] val ConfirmClass = classOf[Confirm] + val DeliverClass = classOf[Deliver] def identifier: Int = 7 def includeManifest: Boolean = true /** - * Serializes [[PersistentBatch]] and [[Persistent]]. Delegates serialization of a - * persistent message's payload to a matching `akka.serialization.Serializer`. + * Serializes [[PersistentBatch]], [[PersistentRepr]] and [[Deliver]] messages. Delegates + * serialization of a persistent message's payload to a matching `akka.serialization.Serializer`. */ def toBinary(o: AnyRef): Array[Byte] = o match { case b: PersistentBatch ⇒ persistentMessageBatchBuilder(b).build().toByteArray - case p: PersistentImpl ⇒ persistentMessageBuilder(p).build().toByteArray + case p: PersistentRepr ⇒ persistentMessageBuilder(p).build().toByteArray case c: Confirm ⇒ confirmMessageBuilder(c).build().toByteArray + case d: Deliver ⇒ deliverMessageBuilder(d).build.toByteArray case _ ⇒ throw new IllegalArgumentException(s"Can't serialize object of type ${o.getClass}") } /** - * Deserializes [[PersistentBatch]] and [[Persistent]]. Delegates deserialization of a - * persistent message's payload to a matching `akka.serialization.Serializer`. + * Deserializes [[PersistentBatch]], [[PersistentRepr]] and [[Deliver]] messages. Delegates + * deserialization of a persistent message's payload to a matching `akka.serialization.Serializer`. 
*/ - def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = manifest match { + def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): Message = manifest match { case None ⇒ persistent(PersistentMessage.parseFrom(bytes)) case Some(c) ⇒ c match { - case PersistentBatchClass ⇒ persistentBatch(PersistentMessageBatch.parseFrom(bytes)) - case PersistentClass ⇒ persistent(PersistentMessage.parseFrom(bytes)) - case ConfirmClass ⇒ confirm(ConfirmMessage.parseFrom(bytes)) - case _ ⇒ throw new IllegalArgumentException(s"Can't deserialize object of type ${c}") + case PersistentImplClass ⇒ persistent(PersistentMessage.parseFrom(bytes)) + case ConfirmablePersistentImplClass ⇒ persistent(PersistentMessage.parseFrom(bytes)) + case PersistentReprClass ⇒ persistent(PersistentMessage.parseFrom(bytes)) + case PersistentBatchClass ⇒ persistentBatch(PersistentMessageBatch.parseFrom(bytes)) + case ConfirmClass ⇒ confirm(ConfirmMessage.parseFrom(bytes)) + case DeliverClass ⇒ deliver(DeliverMessage.parseFrom(bytes)) + case _ ⇒ throw new IllegalArgumentException(s"Can't deserialize object of type ${c}") } } @@ -56,17 +69,27 @@ class MessageSerializer(val system: ExtendedActorSystem) extends Serializer { // toBinary helpers // + private def deliverMessageBuilder(deliver: Deliver) = { + val builder = DeliverMessage.newBuilder + builder.setPersistent(persistentMessageBuilder(deliver.persistent.asInstanceOf[PersistentRepr])) + builder.setDestination(Serialization.serializedActorPath(deliver.destination)) + deliver.resolve match { + case Resolve.Off ⇒ builder.setResolve(DeliverMessage.ResolveStrategy.Off) + case Resolve.Sender ⇒ builder.setResolve(DeliverMessage.ResolveStrategy.Sender) + case Resolve.Destination ⇒ builder.setResolve(DeliverMessage.ResolveStrategy.Destination) + } + } + private def persistentMessageBatchBuilder(persistentBatch: PersistentBatch) = { val builder = PersistentMessageBatch.newBuilder - persistentBatch.persistentImplList.foreach(p ⇒ builder.addBatch(persistentMessageBuilder(p))) + persistentBatch.persistentReprList.foreach(p ⇒ builder.addBatch(persistentMessageBuilder(p))) builder } - private def persistentMessageBuilder(persistent: PersistentImpl) = { + private def persistentMessageBuilder(persistent: PersistentRepr) = { val builder = PersistentMessage.newBuilder if (persistent.processorId != Undefined) builder.setProcessorId(persistent.processorId) - if (persistent.channelId != Undefined) builder.setChannelId(persistent.channelId) if (persistent.confirmMessage != null) builder.setConfirmMessage(confirmMessageBuilder(persistent.confirmMessage)) if (persistent.confirmTarget != null) builder.setConfirmTarget(Serialization.serializedActorPath(persistent.confirmTarget)) if (persistent.sender != null) builder.setSender(Serialization.serializedActorPath(persistent.sender)) @@ -77,6 +100,7 @@ class MessageSerializer(val system: ExtendedActorSystem) extends Serializer { builder.setSequenceNr(persistent.sequenceNr) builder.setDeleted(persistent.deleted) builder.setResolved(persistent.resolved) + builder.setConfirmable(persistent.confirmable) builder } @@ -102,18 +126,29 @@ class MessageSerializer(val system: ExtendedActorSystem) extends Serializer { // fromBinary helpers // + private def deliver(deliverMessage: DeliverMessage): Deliver = { + Deliver( + persistent(deliverMessage.getPersistent), + system.provider.resolveActorRef(deliverMessage.getDestination), + deliverMessage.getResolve match { + case ResolveStrategy.Off ⇒ Resolve.Off + case ResolveStrategy.Sender ⇒ 
Resolve.Sender + case ResolveStrategy.Destination ⇒ Resolve.Destination + }) + } + private def persistentBatch(persistentMessageBatch: PersistentMessageBatch): PersistentBatch = PersistentBatch(immutableSeq(persistentMessageBatch.getBatchList).map(persistent)) - private def persistent(persistentMessage: PersistentMessage): PersistentImpl = { - PersistentImpl( + private def persistent(persistentMessage: PersistentMessage): PersistentRepr = { + PersistentRepr( payload(persistentMessage.getPayload), persistentMessage.getSequenceNr, if (persistentMessage.hasProcessorId) persistentMessage.getProcessorId else Undefined, - if (persistentMessage.hasChannelId) persistentMessage.getChannelId else Undefined, persistentMessage.getDeleted, persistentMessage.getResolved, immutableSeq(persistentMessage.getConfirmsList), + persistentMessage.getConfirmable, if (persistentMessage.hasConfirmMessage) confirm(persistentMessage.getConfirmMessage) else null, if (persistentMessage.hasConfirmTarget) system.provider.resolveActorRef(persistentMessage.getConfirmTarget) else null, if (persistentMessage.hasSender) system.provider.resolveActorRef(persistentMessage.getSender) else null) diff --git a/akka-persistence/src/main/scala/akka/persistence/serialization/SnapshotSerializer.scala b/akka-persistence/src/main/scala/akka/persistence/serialization/SnapshotSerializer.scala index 066c3e16d6..ef18a5ff91 100644 --- a/akka-persistence/src/main/scala/akka/persistence/serialization/SnapshotSerializer.scala +++ b/akka-persistence/src/main/scala/akka/persistence/serialization/SnapshotSerializer.scala @@ -1,4 +1,5 @@ /** + * Copyright (C) 2009-2013 Typesafe Inc. * Copyright (C) 2012-2013 Eligotech BV. */ diff --git a/akka-persistence/src/main/scala/akka/persistence/snapshot/SnapshotStore.scala b/akka-persistence/src/main/scala/akka/persistence/snapshot/SnapshotStore.scala index 3b54ed2412..b3422cc5ac 100644 --- a/akka-persistence/src/main/scala/akka/persistence/snapshot/SnapshotStore.scala +++ b/akka-persistence/src/main/scala/akka/persistence/snapshot/SnapshotStore.scala @@ -1,4 +1,5 @@ /** + * Copyright (C) 2009-2013 Typesafe Inc. * Copyright (C) 2012-2013 Eligotech BV. */ @@ -47,9 +48,7 @@ trait SnapshotStore extends Actor { //#snapshot-store-plugin-api /** - * Plugin API. - * - * Asynchronously loads a snapshot. + * Plugin API: asynchronously loads a snapshot. * * @param processorId processor id. * @param criteria selection criteria for loading. @@ -57,9 +56,7 @@ trait SnapshotStore extends Actor { def loadAsync(processorId: String, criteria: SnapshotSelectionCriteria): Future[Option[SelectedSnapshot]] /** - * Plugin API. - * - * Asynchronously saves a snapshot. + * Plugin API: asynchronously saves a snapshot. * * @param metadata snapshot metadata. * @param snapshot snapshot. @@ -67,18 +64,14 @@ trait SnapshotStore extends Actor { def saveAsync(metadata: SnapshotMetadata, snapshot: Any): Future[Unit] /** - * Plugin API. - * - * Called after successful saving of a snapshot. + * Plugin API: called after successful saving of a snapshot. * * @param metadata snapshot metadata. */ def saved(metadata: SnapshotMetadata) /** - * Plugin API. - * - * Deletes the snapshot identified by `metadata`. + * Plugin API: deletes the snapshot identified by `metadata`. * * @param metadata snapshot metadata. 
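+ *
+ * A file based store might, for example, map the metadata to a snapshot file and remove it
+ * (minimal sketch using a hypothetical `snapshotFile` helper, for illustration only):
+ *
+ * {{{
+ * def delete(metadata: SnapshotMetadata): Unit = {
+ *   // snapshotFile (hypothetical) maps metadata to one java.io.File
+ *   // per (processorId, sequenceNr, timestamp) combination
+ *   val file = snapshotFile(metadata)
+ *   if (file.exists) file.delete()
+ * }
+ * }}}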
*/ diff --git a/akka-persistence/src/main/scala/akka/persistence/snapshot/japi/SnapshotStore.scala b/akka-persistence/src/main/scala/akka/persistence/snapshot/japi/SnapshotStore.scala index 0c095f04cc..d73294dc77 100644 --- a/akka-persistence/src/main/scala/akka/persistence/snapshot/japi/SnapshotStore.scala +++ b/akka-persistence/src/main/scala/akka/persistence/snapshot/japi/SnapshotStore.scala @@ -11,9 +11,7 @@ import akka.persistence._ import akka.persistence.snapshot.{ SnapshotStore ⇒ SSnapshotStore } /** - * Java API. - * - * Abstract snapshot store. + * Java API: abstract snapshot store. */ abstract class SnapshotStore extends SSnapshotStore with SnapshotStorePlugin { import context.dispatcher diff --git a/akka-persistence/src/main/scala/akka/persistence/snapshot/local/LocalSnapshotStore.scala b/akka-persistence/src/main/scala/akka/persistence/snapshot/local/LocalSnapshotStore.scala index 53a8d13f67..d0d27655f9 100644 --- a/akka-persistence/src/main/scala/akka/persistence/snapshot/local/LocalSnapshotStore.scala +++ b/akka-persistence/src/main/scala/akka/persistence/snapshot/local/LocalSnapshotStore.scala @@ -1,4 +1,5 @@ /** + * Copyright (C) 2009-2013 Typesafe Inc. * Copyright (C) 2012-2013 Eligotech BV. */ diff --git a/akka-persistence/src/test/scala/akka/persistence/ChannelSpec.scala b/akka-persistence/src/test/scala/akka/persistence/ChannelSpec.scala index 08838277cb..fb67bace37 100644 --- a/akka-persistence/src/test/scala/akka/persistence/ChannelSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/ChannelSpec.scala @@ -10,9 +10,9 @@ import akka.actor._ import akka.testkit._ object ChannelSpec { - class TestProcessor(name: String) extends NamedProcessor(name) { + class TestProcessor(name: String, channelProps: Props) extends NamedProcessor(name) { val destination = context.actorOf(Props[TestDestination]) - val channel = context.actorOf(Channel.props("channel")) + val channel = context.actorOf(channelProps) def receive = { case m @ Persistent(s: String, _) if s.startsWith("a") ⇒ { @@ -38,6 +38,14 @@ object ChannelSpec { case Persistent(payload, _) ⇒ testActor ! 
payload } } + + class TestDestinationProcessor(name: String) extends NamedProcessor(name) { + def receive = { + case cp @ ConfirmablePersistent("a", _) ⇒ cp.confirm() + case cp @ ConfirmablePersistent("b", _) ⇒ cp.confirm() + case cp @ ConfirmablePersistent("boom", _) if (recoveryFinished) ⇒ throw new TestException("boom") + } + } } abstract class ChannelSpec(config: Config) extends AkkaSpec(config) with PersistenceSpec with ImplicitSender { @@ -50,46 +58,78 @@ abstract class ChannelSpec(config: Config) extends AkkaSpec(config) with Persist val forwardProbe = TestProbe() val replyProbe = TestProbe() - val processor = system.actorOf(Props(classOf[TestProcessor], name)) + val processor = system.actorOf(Props(classOf[TestProcessor], name, channelProps(s"${name}-channel"))) - system.eventStream.subscribe(confirmProbe.ref, classOf[Confirm]) + subscribeToConfirmation(confirmProbe) processor tell (Persistent("a1"), forwardProbe.ref) processor tell (Persistent("b1"), replyProbe.ref) - forwardProbe.expectMsgPF() { case m @ Persistent("fw: a1", _) ⇒ m.confirm() } - replyProbe.expectMsgPF() { case m @ Persistent("re: b1", _) ⇒ m.confirm() } + forwardProbe.expectMsgPF() { case m @ ConfirmablePersistent("fw: a1", _) ⇒ m.confirm() } + replyProbe.expectMsgPF() { case m @ ConfirmablePersistent("re: b1", _) ⇒ m.confirm() } - // wait for confirmations to be stored by journal (needed - // for replay so that channels can drop confirmed messages) - confirmProbe.expectMsgType[Confirm] - confirmProbe.expectMsgType[Confirm] + awaitConfirmation(confirmProbe) + awaitConfirmation(confirmProbe) } - def actorRefFor(topLevelName: String) = { + def actorRefFor(topLevelName: String) = extension.system.provider.resolveActorRef(RootActorPath(Address("akka", system.name)) / "user" / topLevelName) - } + + def channelProps(channelId: String): Props = + Channel.props(channelId) + + def subscribeToConfirmation(probe: TestProbe): Unit = + system.eventStream.subscribe(probe.ref, classOf[Confirm]) + + def awaitConfirmation(probe: TestProbe): Unit = + probe.expectMsgType[Confirm] "A channel" must { - "forward un-confirmed messages to destination" in { - val processor = system.actorOf(Props(classOf[TestProcessor], name)) + "forward new messages to destination" in { + val processor = system.actorOf(Props(classOf[TestProcessor], name, channelProps(s"${name}-channel"))) processor ! Persistent("a2") - expectMsgPF() { case m @ Persistent("fw: a2", _) ⇒ m.confirm() } + expectMsgPF() { case m @ ConfirmablePersistent("fw: a2", _) ⇒ m.confirm() } } - "reply un-confirmed messages to senders" in { - val processor = system.actorOf(Props(classOf[TestProcessor], name)) + "reply new messages to senders" in { + val processor = system.actorOf(Props(classOf[TestProcessor], name, channelProps(s"${name}-channel"))) processor ! 
Persistent("b2") - expectMsgPF() { case m @ Persistent("re: b2", _) ⇒ m.confirm() } + expectMsgPF() { case m @ ConfirmablePersistent("re: b2", _) ⇒ m.confirm() } + } + "forward un-confirmed stored messages to destination during recovery" in { + val confirmProbe = TestProbe() + val forwardProbe = TestProbe() + + subscribeToConfirmation(confirmProbe) + + val processor1 = system.actorOf(Props(classOf[TestProcessor], name, channelProps(s"${name}-channel"))) + + processor1 tell (Persistent("a1"), forwardProbe.ref) + processor1 tell (Persistent("a2"), forwardProbe.ref) + + forwardProbe.expectMsgPF() { case m @ ConfirmablePersistent("fw: a1", _) ⇒ /* no confirmation */ } + forwardProbe.expectMsgPF() { case m @ ConfirmablePersistent("fw: a2", _) ⇒ m.confirm() } + + awaitConfirmation(confirmProbe) + + val processor2 = system.actorOf(Props(classOf[TestProcessor], name, channelProps(s"${name}-channel"))) + + processor2 tell (Persistent("a3"), forwardProbe.ref) + + forwardProbe.expectMsgPF() { case m @ ConfirmablePersistent("fw: a1", _) ⇒ m.confirm() } // sender still valid, no need to resolve + forwardProbe.expectMsgPF() { case m @ ConfirmablePersistent("fw: a3", _) ⇒ m.confirm() } + + awaitConfirmation(confirmProbe) + awaitConfirmation(confirmProbe) } "must resolve sender references and preserve message order" in { - val channel = system.actorOf(Channel.props()) + val channel = system.actorOf(channelProps("channel-1")) val destination = system.actorOf(Props[TestDestination]) val empty = actorRefFor("testSender") // will be an EmptyLocalActorRef val sender = system.actorOf(Props(classOf[TestReceiver], testActor), "testSender") // replayed message (resolved = false) and invalid sender reference - channel tell (Deliver(PersistentImpl("a", resolved = false), destination, Resolve.Sender), empty) + channel tell (Deliver(PersistentRepr("a", resolved = false), destination, Resolve.Sender), empty) // new messages (resolved = true) and valid sender references channel tell (Deliver(Persistent("b"), destination), sender) @@ -100,13 +140,13 @@ abstract class ChannelSpec(config: Config) extends AkkaSpec(config) with Persist expectMsg("c") } "must resolve destination references and preserve message order" in { - val channel = system.actorOf(Channel.props()) + val channel = system.actorOf(channelProps("channel-2")) val empty = actorRefFor("testDestination") // will be an EmptyLocalActorRef val destination = system.actorOf(Props(classOf[TestReceiver], testActor), "testDestination") // replayed message (resolved = false) and invalid destination reference - channel ! Deliver(PersistentImpl("a", resolved = false), empty, Resolve.Destination) + channel ! Deliver(PersistentRepr("a", resolved = false), empty, Resolve.Destination) // new messages (resolved = true) and valid destination references channel ! Deliver(Persistent("b"), destination) @@ -116,8 +156,130 @@ abstract class ChannelSpec(config: Config) extends AkkaSpec(config) with Persist expectMsg("b") expectMsg("c") } + "support processors as destination" in { + val channel = system.actorOf(channelProps(s"${name}-channel-new")) + val destination = system.actorOf(Props(classOf[TestDestinationProcessor], s"${name}-new")) + val confirmProbe = TestProbe() + + subscribeToConfirmation(confirmProbe) + + channel ! 
Deliver(Persistent("a"), destination)
+
+      awaitConfirmation(confirmProbe)
+    }
+    "support processors as destination that may fail" in {
+      val channel = system.actorOf(channelProps(s"${name}-channel-new"))
+      val destination = system.actorOf(Props(classOf[TestDestinationProcessor], s"${name}-new"))
+      val confirmProbe = TestProbe()
+
+      subscribeToConfirmation(confirmProbe)
+
+      channel ! Deliver(Persistent("a"), destination)
+      channel ! Deliver(Persistent("boom"), destination)
+      channel ! Deliver(Persistent("b"), destination)
+
+      awaitConfirmation(confirmProbe)
+      awaitConfirmation(confirmProbe)
+    }
+    "accept confirmable persistent messages for delivery" in {
+      val channel = system.actorOf(channelProps(s"${name}-channel-new"))
+      val destination = system.actorOf(Props[TestDestination])
+      val confirmProbe = TestProbe()
+
+      subscribeToConfirmation(confirmProbe)
+
+      channel ! Deliver(PersistentRepr("a", confirmable = true), destination)
+
+      expectMsgPF() { case m @ ConfirmablePersistent("a", _) ⇒ m.confirm() }
+      awaitConfirmation(confirmProbe)
+    }
+  }
+}
+
+abstract class PersistentChannelSpec(config: Config) extends ChannelSpec(config) {
+  override def channelProps(channelId: String): Props =
+    PersistentChannel.props(channelId)
+
+  override def subscribeToConfirmation(probe: TestProbe): Unit =
+    system.eventStream.subscribe(probe.ref, classOf[JournalProtocol.Delete])
+
+  override def awaitConfirmation(probe: TestProbe): Unit =
+    probe.expectMsgType[JournalProtocol.Delete]
+
+  "A persistent channel" must {
+    "support disabling and re-enabling delivery" in {
+      val channel = system.actorOf(channelProps(s"${name}-channel"))
+      val confirmProbe = TestProbe()
+
+      subscribeToConfirmation(confirmProbe)
+
+      channel ! Deliver(Persistent("a"), testActor)
+
+      expectMsgPF() { case m @ ConfirmablePersistent("a", _) ⇒ m.confirm() }
+      awaitConfirmation(confirmProbe)
+
+      channel ! DisableDelivery
+      channel ! Deliver(Persistent("b"), testActor)
+      channel ! EnableDelivery
+      channel ! Deliver(Persistent("c"), testActor)
+
+      expectMsgPF() { case m @ ConfirmablePersistent("b", _) ⇒ m.confirm() }
+      expectMsgPF() { case m @ ConfirmablePersistent("c", _) ⇒ m.confirm() }
+    }
+    "support Persistent replies to Deliver senders" in {
+      val channel = system.actorOf(PersistentChannel.props(s"${name}-channel-new", true))
+
+      channel ! Deliver(Persistent("a"), system.deadLetters)
+      expectMsgPF() { case Persistent("a", 1) ⇒ }
+
+      channel ! Deliver(PersistentRepr("b", sequenceNr = 13), system.deadLetters)
+      expectMsgPF() { case Persistent("b", 13) ⇒ }
+    }
+    "not modify certain persistent message fields" in {
+      val channel = system.actorOf(channelProps(s"${name}-channel-new"))
+      val persistent1 = PersistentRepr(payload = "a", processorId = "p1", confirms = List("c1", "c2"), sender = channel, sequenceNr = 13)
+      val persistent2 = PersistentRepr(payload = "b", processorId = "p1", confirms = List("c1", "c2"), sender = channel)
+
+      channel ! Deliver(persistent1, testActor)
+      channel ! Deliver(persistent2, testActor)
+
+      expectMsgPF() { case ConfirmablePersistentImpl("a", 13, "p1", _, _, Seq("c1", "c2"), _, _, channel) ⇒ }
+      expectMsgPF() { case ConfirmablePersistentImpl("b", 2, "p1", _, _, Seq("c1", "c2"), _, _, channel) ⇒ }
+    }
+  }
+
+  "A persistent channel" when {
+    "used standalone" must {
+      "redeliver un-confirmed stored messages during recovery" in {
+        val confirmProbe = TestProbe()
+        val forwardProbe = TestProbe()
+
+        subscribeToConfirmation(confirmProbe)
+
+        val channel1 = system.actorOf(channelProps(s"${name}-channel"))
+        channel1 tell (Deliver(Persistent("a1"), forwardProbe.ref), null)
+        channel1 tell (Deliver(Persistent("a2"), forwardProbe.ref), null)
+
+        forwardProbe.expectMsgPF() { case m @ ConfirmablePersistent("a1", _) ⇒ /* no confirmation */ }
+        forwardProbe.expectMsgPF() { case m @ ConfirmablePersistent("a2", _) ⇒ m.confirm() }
+
+        awaitConfirmation(confirmProbe)
+
+        val channel2 = system.actorOf(channelProps(s"${name}-channel"))
+        channel2 tell (Deliver(Persistent("a3"), forwardProbe.ref), null)
+
+        forwardProbe.expectMsgPF() { case m @ ConfirmablePersistent("a1", _) ⇒ m.confirm() } // sender still valid, no need to resolve
+        forwardProbe.expectMsgPF() { case m @ ConfirmablePersistent("a3", _) ⇒ m.confirm() }
+
+        awaitConfirmation(confirmProbe)
+        awaitConfirmation(confirmProbe)
+      }
+    }
   }
 }
 
 class LeveldbChannelSpec extends ChannelSpec(PersistenceSpec.config("leveldb", "channel"))
 class InmemChannelSpec extends ChannelSpec(PersistenceSpec.config("inmem", "channel"))
+
+class LeveldbPersistentChannelSpec extends PersistentChannelSpec(PersistenceSpec.config("leveldb", "persistent-channel"))
+class InmemPersistentChannelSpec extends PersistentChannelSpec(PersistenceSpec.config("inmem", "persistent-channel"))
diff --git a/akka-persistence/src/test/scala/akka/persistence/serialization/SerializerSpec.scala b/akka-persistence/src/test/scala/akka/persistence/serialization/SerializerSpec.scala
index 402770906b..09759d2b1c 100644
--- a/akka-persistence/src/test/scala/akka/persistence/serialization/SerializerSpec.scala
+++ b/akka-persistence/src/test/scala/akka/persistence/serialization/SerializerSpec.scala
@@ -76,8 +76,17 @@ class MessageSerializerPersistenceSpec extends AkkaSpec(config(customSerializers
 
   "A message serializer" when {
     "not given a manifest" must {
-      "handle custom persistent message serialization" in {
-        val persistent = PersistentImpl(MyPayload("a"), 13, "p1", "c1", true, true, Seq("c1", "c2"), Confirm("p2", 14, "c2"), testActor, testActor)
+      "handle custom ConfirmablePersistent message serialization" in {
+        val persistent = PersistentRepr(MyPayload("a"), 13, "p1", true, true, List("c1", "c2"), confirmable = true, Confirm("p2", 14, "c2"), testActor, testActor)
+        val serializer = serialization.findSerializerFor(persistent)
+
+        val bytes = serializer.toBinary(persistent)
+        val deserialized = serializer.fromBinary(bytes, None)
+
+        deserialized must be(persistent.withPayload(MyPayload(".a.")))
+      }
+      "handle custom Persistent message serialization" in {
+        val persistent = PersistentRepr(MyPayload("a"), 13, "p1", true, true, List("c1", "c2"), confirmable = false, Confirm("p2", 14, "c2"), testActor, testActor)
         val serializer = serialization.findSerializerFor(persistent)
 
         val bytes = serializer.toBinary(persistent)
@@ -86,19 +95,28 @@ class MessageSerializerPersistenceSpec extends AkkaSpec(config(customSerializers
         deserialized must be(persistent.withPayload(MyPayload(".a.")))
       }
     }
-    "given a persistent message manifest" must {
-      "handle custom persistent message serialization" in {
-        val persistent = PersistentImpl(MyPayload("b"), 13, "p1", "c1", true, true, Seq("c1", "c2"), Confirm("p2", 14, "c2"), testActor, testActor)
+    "given a PersistentRepr manifest" must {
+      "handle custom ConfirmablePersistent message serialization" in {
+        val persistent = PersistentRepr(MyPayload("b"), 13, "p1", true, true, List("c1", "c2"), confirmable = true, Confirm("p2", 14, "c2"), testActor, testActor)
         val serializer = serialization.findSerializerFor(persistent)
 
         val bytes = serializer.toBinary(persistent)
-        val deserialized = serializer.fromBinary(bytes, Some(classOf[PersistentImpl]))
+        val deserialized = serializer.fromBinary(bytes, Some(classOf[PersistentRepr]))
+
+        deserialized must be(persistent.withPayload(MyPayload(".b.")))
+      }
+      "handle custom Persistent message serialization" in {
+        val persistent = PersistentRepr(MyPayload("b"), 13, "p1", true, true, List("c1", "c2"), confirmable = false, Confirm("p2", 14, "c2"), testActor, testActor)
+        val serializer = serialization.findSerializerFor(persistent)
+
+        val bytes = serializer.toBinary(persistent)
+        val deserialized = serializer.fromBinary(bytes, Some(classOf[PersistentRepr]))
 
         deserialized must be(persistent.withPayload(MyPayload(".b.")))
       }
     }
-    "given a confirmation message manifest" must {
-      "handle confirmation message serialization" in {
+    "given a Confirm manifest" must {
+      "handle Confirm message serialization" in {
         val confirmation = Confirm("x", 2, "y")
         val serializer = serialization.findSerializerFor(confirmation)
 
@@ -120,8 +138,9 @@ object MessageSerializerRemotingSpec {
 
   class RemoteActor extends Actor {
     def receive = {
-      case PersistentBatch(Persistent(MyPayload(data), _) +: tail) ⇒ sender ! data
-      case Persistent(MyPayload(data), _) ⇒ sender ! data
+      case PersistentBatch(Persistent(MyPayload(data), _) +: tail) ⇒ sender ! s"b${data}"
+      case ConfirmablePersistent(MyPayload(data), _) ⇒ sender ! s"c${data}"
+      case Persistent(MyPayload(data), _) ⇒ sender ! s"p${data}"
       case Confirm(pid, snr, cid) ⇒ sender ! s"${pid},${snr},${cid}"
     }
   }
@@ -146,15 +165,19 @@ class MessageSerializerRemotingSpec extends AkkaSpec(config(systemA).withFallbac
   }
 
   "A message serializer" must {
-    "custom-serialize persistent messages during remoting" in {
+    "custom-serialize Persistent messages during remoting" in {
       localActor ! Persistent(MyPayload("a"))
-      expectMsg(".a.")
+      expectMsg("p.a.")
     }
-    "custom-serialize persistent message batches during remoting" in {
+    "custom-serialize ConfirmablePersistent messages during remoting" in {
+      localActor ! PersistentRepr(MyPayload("a"), confirmable = true)
+      expectMsg("c.a.")
+    }
+    "custom-serialize Persistent message batches during remoting" in {
       localActor ! PersistentBatch(immutable.Seq(Persistent(MyPayload("a"))))
-      expectMsg(".a.")
+      expectMsg("b.a.")
     }
-    "serialize confirmation messages during remoting" in {
+    "serialize Confirm messages during remoting" in {
       localActor ! Confirm("a", 2, "b")
       expectMsg("a,2,b")
     }
diff --git a/akka-samples/akka-sample-persistence/src/main/java/sample/persistence/japi/ProcessorChannelExample.java b/akka-samples/akka-sample-persistence/src/main/java/sample/persistence/japi/ProcessorChannelExample.java
index c03c172ac1..8bcfc75679 100644
--- a/akka-samples/akka-sample-persistence/src/main/java/sample/persistence/japi/ProcessorChannelExample.java
+++ b/akka-samples/akka-sample-persistence/src/main/java/sample/persistence/japi/ProcessorChannelExample.java
@@ -30,8 +30,8 @@ public class ProcessorChannelExample {
   public static class ExampleDestination extends UntypedActor {
     @Override
     public void onReceive(Object message) throws Exception {
-      if (message instanceof Persistent) {
-        Persistent msg = (Persistent)message;
+      if (message instanceof ConfirmablePersistent) {
+        ConfirmablePersistent msg = (ConfirmablePersistent)message;
         msg.confirm();
         System.out.println("received " + msg.payload());
       }
diff --git a/akka-samples/akka-sample-persistence/src/main/scala/sample/persistence/ConversationRecoveryExample.scala b/akka-samples/akka-sample-persistence/src/main/scala/sample/persistence/ConversationRecoveryExample.scala
index 67919ed0be..89a869b276 100644
--- a/akka-samples/akka-sample-persistence/src/main/scala/sample/persistence/ConversationRecoveryExample.scala
+++ b/akka-samples/akka-sample-persistence/src/main/scala/sample/persistence/ConversationRecoveryExample.scala
@@ -16,14 +16,14 @@ object ConversationRecoveryExample extends App {
     var counter = 0
 
     def receive = {
-      case m @ Persistent(Ping, _) ⇒ {
+      case m @ ConfirmablePersistent(Ping, _) ⇒ {
         counter += 1
         println(s"received ping ${counter} times ...")
         m.confirm()
-        if (!recoveryRunning) Thread.sleep(2000)
+        if (!recoveryRunning) Thread.sleep(1000)
         pongChannel ! Deliver(m.withPayload(Pong), sender, Resolve.Destination)
       }
-      case "init" ⇒ if (counter == 0) self forward Persistent(Ping)
+      case "init" ⇒ if (counter == 0) pongChannel ! Deliver(Persistent(Pong), sender)
     }
 
     override def preStart() = ()
@@ -34,11 +34,11 @@ object ConversationRecoveryExample extends App {
     var counter = 0
 
     def receive = {
-      case m @ Persistent(Pong, _) ⇒ {
+      case m @ ConfirmablePersistent(Pong, _) ⇒ {
         counter += 1
         println(s"received pong ${counter} times ...")
         m.confirm()
-        if (!recoveryRunning) Thread.sleep(2000)
+        if (!recoveryRunning) Thread.sleep(1000)
         pingChannel ! Deliver(m.withPayload(Ping), sender, Resolve.Destination)
       }
     }
diff --git a/akka-samples/akka-sample-persistence/src/main/scala/sample/persistence/ProcessorChannelExample.scala b/akka-samples/akka-sample-persistence/src/main/scala/sample/persistence/ProcessorChannelExample.scala
index aac20be1be..cdac4e3716 100644
--- a/akka-samples/akka-sample-persistence/src/main/scala/sample/persistence/ProcessorChannelExample.scala
+++ b/akka-samples/akka-sample-persistence/src/main/scala/sample/persistence/ProcessorChannelExample.scala
@@ -25,7 +25,7 @@ object ProcessorChannelExample extends App {
 
   class ExampleDestination extends Actor {
     def receive = {
-      case p @ Persistent(payload, snr) ⇒ {
+      case p @ ConfirmablePersistent(payload, snr) ⇒ {
         println(s"received ${payload}")
         sender ! s"re: ${payload} (${snr})"
         p.confirm()